source: trunk/source/lisp-kernel/arm-exceptions.c

Last change on this file was 16767, checked in by gb, 3 years ago

If pc_luser_xp() changes a thread's context, set a flag bit in the
target tcr. If that bit is set in a subsequent synchronous signal,
effectively ignore that signal and clear the bit. Would rather not
have to do this.

Some other internal changes.

ticket:1257 is still open.

File size: 52.2 KB
Line 
1/*
2 * Copyright 2010 Clozure Associates
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "lisp.h"
18#include "lisp-exceptions.h"
19#include "lisp_globals.h"
20#include <ctype.h>
21#include <stdio.h>
22#include <stddef.h>
23#include <string.h>
24#include <stdarg.h>
25#include <errno.h>
26#include <stdio.h>
27#ifdef LINUX
28#include <strings.h>
29#include <sys/mman.h>
30#ifndef ANDROID
31#include <fpu_control.h>
32#include <linux/prctl.h>
33#endif
34#endif
35
36#ifdef DARWIN
37#include <sys/mman.h>
38#ifndef SA_NODEFER
39#define SA_NODEFER 0
40#endif
41#include <sysexits.h>
42
43
44/* a distinguished UUO at a distinguished address */
45extern void pseudo_sigreturn(ExceptionInformation *);
46#endif
47
48
49#include "threads.h"
50
51#ifdef ANDROID
52#define pthread_sigmask(how,in,out) rt_sigprocmask(how,in,out,8)
53#endif
54
55#ifdef LINUX
56
/* No-op on Linux/ARM: FP exception enabling is not implemented here. */
void
enable_fp_exceptions()
{
}
61
/* No-op on Linux/ARM: FP exception disabling is not implemented here. */
void
disable_fp_exceptions()
{
}
66#endif
67
68/*
69  Handle exceptions.
70
71*/
72
extern LispObj lisp_nil;

extern natural lisp_heap_gc_threshold;
extern Boolean grow_dynamic_area(natural);

/* False while lisp allocation has been disabled via
   GC_TRAP_FUNCTION_ALLOCATION_CONTROL. */
Boolean allocation_enabled = true;

/* True once the GC-notification callback has run; cleared by a full
   GC or when the notification threshold is changed. */
Boolean
did_gc_notification_since_last_full_gc = false;




int
page_size = 4096;

/* log2(page_size) */
int
log2_page_size = 12;
92
93
94
95
96/*
97  If the PC is pointing to an allocation trap, the previous instruction
98  must have decremented allocptr.  Return the non-zero amount by which
99  allocptr was decremented.
100*/
signed_natural
allocptr_displacement(ExceptionInformation *xp)
{
  pc program_counter = xpPC(xp);
  opcode instr = *program_counter, prev_instr;
  /* Offset (in instructions, negative) from the trap back to the
     instruction that decremented allocptr; -3 skips the intervening
     cmp and the load from tcr.allocbase. */
  int delta = -3;

  if (IS_ALLOC_TRAP(instr)) {
    /* The alloc trap must have been preceded by a cmp and a
       load from tcr.allocbase. */
    if (IS_BRANCH_AROUND_ALLOC_TRAP(program_counter[-1])) {
      /* An extra branch-around instruction sits between the subtract
         sequence and the trap. */
      delta = -4;
    }
    prev_instr = program_counter[delta];

    if (IS_SUB_RM_FROM_ALLOCPTR(prev_instr)) {
      /* Decrement came from a register operand. */
      return -((signed_natural)xpGPR(xp,RM_field(prev_instr)));
    }
   
    if (IS_SUB_LO_FROM_ALLOCPTR(prev_instr)) {
      /* Decrement was an 8-bit immediate. */
      return -((signed_natural)(prev_instr & 0xff));
    }

    if (IS_SUB_FROM_ALLOCPTR(prev_instr)) {
      /* Rotated-immediate subtract; combine with the low-byte
         subtract that precedes it to recover the full displacement. */
      natural disp = ror(prev_instr&0xff,(prev_instr&0xf00)>>7);

      instr = program_counter[delta-1];
      if (IS_SUB_LO_FROM_ALLOCPTR(instr)) {
        return -((signed_natural)(disp | (instr & 0xff)));
      }
    }
    Bug(xp, "Can't determine allocation displacement");
  }
  return 0;
}
136
137
138/*
139  A cons cell's been successfully allocated, but the allocptr's
140  still tagged (as fulltag_cons, of course.)  Emulate any instructions
141  that might follow the allocation (stores to the car or cdr, an
142  assignment to the "result" gpr) that take place while the allocptr's
143  tag is non-zero, advancing over each such instruction.  When we're
144  done, the cons cell will be allocated and initialized, the result
145  register will point to it, the allocptr will be untagged, and
146  the PC will point past the instruction that clears the allocptr's
147  tag.
148*/
149void
150finish_allocating_cons(ExceptionInformation *xp)
151{
152  pc program_counter = xpPC(xp);
153  opcode instr;
154  LispObj cur_allocptr = xpGPR(xp, allocptr);
155  cons *c = (cons *)ptr_from_lispobj(untag(cur_allocptr));
156  int target_reg;
157  while (1) {
158    instr = *program_counter;
159
160    if (IS_CLR_ALLOCPTR_TAG(instr)) {
161      xpGPR(xp, allocptr) = untag(cur_allocptr);
162      xpPC(xp) = program_counter+1;
163      return;
164    } else if (IS_SET_ALLOCPTR_CAR_RD(instr)) {
165      c->car = xpGPR(xp,RD_field(instr));
166    } else if (IS_SET_ALLOCPTR_CDR_RD(instr)) {
167      c->cdr = xpGPR(xp,RD_field(instr));
168    } else if (IS_SET_ALLOCPTR_RESULT_RD(instr)) {
169      xpGPR(xp,RD_field(instr)) = cur_allocptr;
170    } else {
171      Bug(xp, "huh?");
172    }
173    program_counter++; 
174  }
175
176}
177
178/*
179  We were interrupted in the process of allocating a uvector; we
180  survived the allocation trap, and allocptr is tagged as fulltag_misc.
181  Emulate any instructions which store a header into the uvector,
182  assign the value of allocptr to some other register, and clear
183  allocptr's tag.  Don't expect/allow any other instructions in
184  this environment.
185*/
186void
187finish_allocating_uvector(ExceptionInformation *xp)
188{
189  pc program_counter = xpPC(xp);
190  opcode instr;
191  LispObj cur_allocptr = xpGPR(xp, allocptr);
192  int target_reg;
193
194  while (1) {
195    instr = *program_counter++;
196    if (IS_CLR_ALLOCPTR_TAG(instr)) {
197      xpGPR(xp, allocptr) = untag(cur_allocptr);
198      xpPC(xp) = program_counter;
199      return;
200    }
201    if (IS_SET_ALLOCPTR_HEADER_RD(instr)) {
202      header_of(cur_allocptr) = xpGPR(xp,RD_field(instr));
203    } else if (IS_SET_ALLOCPTR_RESULT_RD(instr)) {
204      xpGPR(xp,RD_field(instr)) = cur_allocptr;
205    } else {
206      Bug(xp, "Unexpected instruction following alloc trap at " LISP ":",program_counter);
207    }
208  }
209}
210
211
/*
  Try to make bytes_needed bytes available to tcr in the dynamic heap,
  running the EGC and/or a full GC as needed.  On success, adjust
  allocptr by disp_from_allocptr and return true; return false if the
  request can't be satisfied even after GC and heap growth.
  crossed_threshold is passed through to new_heap_segment —
  presumably set when the GC-notification threshold is crossed;
  confirm against new_heap_segment's definition.
*/
Boolean
allocate_object(ExceptionInformation *xp,
                natural bytes_needed, 
                signed_natural disp_from_allocptr,
                TCR *tcr,
                Boolean *crossed_threshold)
{
  area *a = active_dynamic_area;

  /* Maybe do an EGC */
  if (a->older && lisp_global(OLDEST_EPHEMERAL)) {
    if (((a->active)-(a->low)) >= a->threshold) {
      gc_from_xp(xp, 0L);
    }
  }

  /* Life is pretty simple if we can simply grab a segment
     without extending the heap.
  */
  if (new_heap_segment(xp, bytes_needed, false, tcr, crossed_threshold)) {
    xpGPR(xp, allocptr) += disp_from_allocptr;
    return true;
  }
 
  /* It doesn't make sense to try a full GC if the object
     we're trying to allocate is larger than everything
     allocated so far.
  */
  if ((lisp_global(HEAP_END)-lisp_global(HEAP_START)) > bytes_needed) {
    untenure_from_area(tenured_area); /* force a full GC */
    gc_from_xp(xp, 0L);
    did_gc_notification_since_last_full_gc = false;
  }
 
  /* Try again, growing the heap if necessary */
  if (new_heap_segment(xp, bytes_needed, true, tcr, NULL)) {
    xpGPR(xp, allocptr) += disp_from_allocptr;
    return true;
  }
 
  return false;
}
254
255#ifndef XNOMEM
256#define XNOMEM 10
257#endif
258
259void
260update_bytes_allocated(TCR* tcr, void *cur_allocptr)
261{
262  BytePtr
263    last = (BytePtr) tcr->last_allocptr, 
264    current = (BytePtr) cur_allocptr;
265  if (last && (cur_allocptr != ((void *)VOID_ALLOCPTR))) {
266    tcr->bytes_allocated += last-current;
267  }
268  tcr->last_allocptr = 0;
269}
270
271void
272lisp_allocation_failure(ExceptionInformation *xp, TCR *tcr, natural bytes_needed)
273{
274  /* Couldn't allocate the object.  If it's smaller than some arbitrary
275     size (say 128K bytes), signal a "chronically out-of-memory" condition;
276     else signal a "allocation request failed" condition.
277  */
278  xpGPR(xp,allocptr) = xpGPR(xp,allocbase) = VOID_ALLOCPTR;
279  handle_error(xp, bytes_needed < (128<<10) ? XNOMEM : error_alloc_failed,0, NULL);
280}
281
282void
283callback_for_gc_notification(ExceptionInformation *xp, TCR *tcr)
284{
285  LispObj cmain = nrs_CMAIN.vcell;
286
287  did_gc_notification_since_last_full_gc = true;
288  if ((fulltag_of(cmain) == fulltag_misc) &&
289      (header_subtag(header_of(cmain)) == subtag_macptr)) {
290    callback_to_lisp(cmain,xp,SIGTRAP,0,NULL);
291  }
292}
293
294/*
295  Allocate a large list, where "large" means "large enough to
296  possibly trigger the EGC several times if this was done
297  by individually allocating each CONS."  The number of
  conses in question is in arg_z; on successful return,
299  the list will be in arg_z
300*/
301
302Boolean
303allocate_list(ExceptionInformation *xp, TCR *tcr)
304{
305  natural
306    nconses = (unbox_fixnum(xpGPR(xp,arg_z))),
307    bytes_needed = (nconses << dnode_shift);
308  LispObj
309    prev = lisp_nil,
310    current,
311    initial = xpGPR(xp,arg_y);
312  Boolean notify_pending_gc = false;
313
314  if (nconses == 0) {
315    /* Silly case */
316    xpGPR(xp,arg_z) = lisp_nil;
317    return true;
318  }
319  update_bytes_allocated(tcr, (void *)(void *) tcr->save_allocptr);
320  if (allocate_object(xp,bytes_needed,(-bytes_needed)+fulltag_cons,tcr,&notify_pending_gc)) {
321    for (current = xpGPR(xp,allocptr);
322         nconses;
323         prev = current, current+= dnode_size, nconses--) {
324      deref(current,0) = prev;
325      deref(current,1) = initial;
326    }
327    xpGPR(xp,arg_z) = prev;
328    xpGPR(xp,arg_y) = xpGPR(xp,allocptr);
329    xpGPR(xp,allocptr)-=fulltag_cons;
330    if (notify_pending_gc && !did_gc_notification_since_last_full_gc) {
331      callback_for_gc_notification(xp,tcr);
332    }
333  } else {
334    lisp_allocation_failure(xp,tcr,bytes_needed);
335  }
336  return true;
337}
338
339Boolean
340handle_alloc_trap(ExceptionInformation *xp, TCR *tcr, Boolean *notify)
341{
342  pc program_counter;
343  natural cur_allocptr, bytes_needed = 0;
344  opcode prev_instr;
345  signed_natural disp = 0;
346  unsigned allocptr_tag;
347
348  if (!allocation_enabled) {
349    /* Back up before the alloc_trap, then let pc_luser_xp() back
350       up some more. */
351    xpPC(xp)-=1;
352    pc_luser_xp(xp,tcr, NULL);
353    allocation_enabled = true;
354    tcr->save_allocbase = (void *)VOID_ALLOCPTR;
355    handle_error(xp, error_allocation_disabled,0,NULL);
356    return true;
357  }
358
359  cur_allocptr = xpGPR(xp,allocptr);
360
361  allocptr_tag = fulltag_of(cur_allocptr);
362
363  switch (allocptr_tag) {
364  case fulltag_cons:
365    bytes_needed = sizeof(cons);
366    disp = -sizeof(cons) + fulltag_cons;
367    break;
368
369  case fulltag_misc:
370    disp = allocptr_displacement(xp);
371    bytes_needed = (-disp) + fulltag_misc;
372    break;
373
374    /* else fall thru */
375  default:
376    return false;
377  }
378
379  update_bytes_allocated(tcr,((BytePtr)(cur_allocptr-disp)));
380  if (allocate_object(xp, bytes_needed, disp, tcr, notify)) {
381    adjust_exception_pc(xp,4);
382    if (notify && *notify) {
383      pc_luser_xp(xp,tcr,NULL);
384      callback_for_gc_notification(xp,tcr);
385    }
386    return true;
387  }
388  lisp_allocation_failure(xp,tcr,bytes_needed);
389  return true;
390}
391
/* Counts of GC requests deferred because GC was inhibited. */
natural gc_deferred = 0, full_gc_deferred = 0;
393
/* Per-thread helper passed to gc_like_from_xp() for the
   flash-freeze operation; no per-thread work is required. */
signed_natural
flash_freeze(TCR *tcr, signed_natural param)
{
  return 0;
}
399
/*
  Dispatch a GC-related request from lisp code.  The operation
  selector is in imm0 and its argument (if any) in imm1; results are
  returned in imm0 or arg_z as appropriate.  Always returns true and
  advances the PC past the trapping instruction.
*/
Boolean
handle_gc_trap(ExceptionInformation *xp, TCR *tcr)
{
  LispObj
    selector = xpGPR(xp,imm0), 
    arg = xpGPR(xp,imm1);
  area *a = active_dynamic_area;
  Boolean egc_was_enabled = (a->older != NULL);
  natural gc_previously_deferred = gc_deferred;


  switch (selector) {
  case GC_TRAP_FUNCTION_EGC_CONTROL:
    /* Enable/disable the EGC; return the previous state (T or NIL). */
    egc_control(arg != 0, a->active);
    xpGPR(xp,arg_z) = lisp_nil + (egc_was_enabled ? t_offset : 0);
    break;

  case GC_TRAP_FUNCTION_CONFIGURE_EGC:
    /* Set the three ephemeral generation thresholds from arg_x/y/z. */
    a->threshold = unbox_fixnum(xpGPR(xp, arg_x));
    g1_area->threshold = unbox_fixnum(xpGPR(xp, arg_y));
    g2_area->threshold = unbox_fixnum(xpGPR(xp, arg_z));
    xpGPR(xp,arg_z) = lisp_nil+t_offset;
    break;

  case GC_TRAP_FUNCTION_SET_LISP_HEAP_THRESHOLD:
    if (((signed_natural) arg) > 0) {
      /* Round the requested threshold up to a whole heap segment. */
      lisp_heap_gc_threshold = 
        align_to_power_of_2((arg-1) +
                            (heap_segment_size - 1),
                            log2_heap_segment_size);
    }
    /* fall through */
  case GC_TRAP_FUNCTION_GET_LISP_HEAP_THRESHOLD:
    xpGPR(xp, imm0) = lisp_heap_gc_threshold;
    break;

  case GC_TRAP_FUNCTION_USE_LISP_HEAP_THRESHOLD:
    /*  Try to put the current threshold in effect.  This may
        need to disable/reenable the EGC. */
    untenure_from_area(tenured_area);
    resize_dynamic_heap(a->active,lisp_heap_gc_threshold);
    if (egc_was_enabled) {
      if ((a->high - a->active) >= a->threshold) {
        tenure_to_area(tenured_area);
      }
    }
    xpGPR(xp, imm0) = lisp_heap_gc_threshold;
    break;

  case GC_TRAP_FUNCTION_SET_GC_NOTIFICATION_THRESHOLD:
    if ((signed_natural)arg >= 0) {
      lisp_heap_notify_threshold = arg;
      did_gc_notification_since_last_full_gc = false;
    }
    /* fall through */

  case GC_TRAP_FUNCTION_GET_GC_NOTIFICATION_THRESHOLD:
    xpGPR(xp, imm0) = lisp_heap_notify_threshold;
    break;


  case GC_TRAP_FUNCTION_ENSURE_STATIC_CONSES:
    ensure_static_conses(xp,tcr,32768);
    break;

  case GC_TRAP_FUNCTION_FLASH_FREEZE:
    /* Make everything currently allocated static; return the frozen
       size in bytes. */
    untenure_from_area(tenured_area);
    gc_like_from_xp(xp,flash_freeze,0);
    a->active = (BytePtr) align_to_power_of_2(a->active, log2_page_size);
    tenured_area->static_dnodes = area_dnode(a->active, a->low);
    if (egc_was_enabled) {
      tenure_to_area(tenured_area);
    }
    xpGPR(xp, imm0) = tenured_area->static_dnodes << dnode_shift;
    break;

  case GC_TRAP_FUNCTION_ALLOCATION_CONTROL:
    switch(arg) {
    case 0: /* disable if allocation enabled */
      xpGPR(xp, arg_z) = lisp_nil;
      if (allocation_enabled) {
        TCR *other_tcr;
        ExceptionInformation *other_context;
        /* Normalize every thread's allocation state while all are
           suspended, so no thread holds a partially-consumed
           segment. */
        suspend_other_threads(true);
        normalize_tcr(xp,tcr,false);
        for (other_tcr=tcr->next; other_tcr != tcr; other_tcr = other_tcr->next) {
          other_context = other_tcr->pending_exception_context;
          if (other_context == NULL) {
            other_context = other_tcr->suspend_context;
          }
          normalize_tcr(other_context, other_tcr, true);
        }
        allocation_enabled = false;
        xpGPR(xp, arg_z) = t_value;
        resume_other_threads(true);
      }
      break;

    case 1:                     /* enable if disabled */
      xpGPR(xp, arg_z) = lisp_nil;
      if (!allocation_enabled) {
        allocation_enabled = true;
        xpGPR(xp, arg_z) = t_value;
      }
      break;

    default:
      /* Query: return T iff allocation is currently enabled. */
      xpGPR(xp, arg_z) = lisp_nil;
      if (allocation_enabled) {
        xpGPR(xp, arg_z) = t_value;
      }
      break;
    }
    break;

       
  default:
    /* The remaining selectors all perform some flavor of full GC. */
    update_bytes_allocated(tcr, (void *) ptr_from_lispobj(xpGPR(xp, allocptr)));

    if (selector == GC_TRAP_FUNCTION_IMMEDIATE_GC) {
      if (!full_gc_deferred) {
        gc_from_xp(xp, 0L);
        break;
      }
      /* Tried to do a full GC when gc was disabled.  That failed,
         so try full GC now */
      selector = GC_TRAP_FUNCTION_GC;
    }
   
    if (egc_was_enabled) {
      egc_control(false, (BytePtr) a->active);
    }
    gc_from_xp(xp, 0L);
    if (gc_deferred > gc_previously_deferred) {
      full_gc_deferred = 1;
    } else {
      full_gc_deferred = 0;
    }
    if (selector > GC_TRAP_FUNCTION_GC) {
      if (selector & GC_TRAP_FUNCTION_IMPURIFY) {
        impurify_from_xp(xp, 0L);
        /*        nrs_GC_EVENT_STATUS_BITS.vcell |= gc_integrity_check_bit; */
        lisp_global(OLDSPACE_DNODE_COUNT) = 0;
        gc_from_xp(xp, 0L);
      }
      if (selector & GC_TRAP_FUNCTION_PURIFY) {
        purify_from_xp(xp, 0L);
        lisp_global(OLDSPACE_DNODE_COUNT) = 0;
        gc_from_xp(xp, 0L);
      }
      if (selector & GC_TRAP_FUNCTION_SAVE_APPLICATION) {
        OSErr err;
        extern OSErr save_application(unsigned, Boolean);
        TCR *tcr = get_tcr(true);
        area *vsarea = tcr->vs_area;
       
        /* Stash the toplevel function from the base of the value
           stack, write the image, and exit on success. */
        nrs_TOPLFUNC.vcell = *((LispObj *)(vsarea->high)-1);
        err = save_application(arg, egc_was_enabled);
        if (err == noErr) {
          _exit(0);
        }
        fatal_oserr(": save_application", err);
      }
      switch (selector) {


      case GC_TRAP_FUNCTION_FREEZE:
        a->active = (BytePtr) align_to_power_of_2(a->active, log2_page_size);
        tenured_area->static_dnodes = area_dnode(a->active, a->low);
        xpGPR(xp, imm0) = tenured_area->static_dnodes << dnode_shift;
        break;
      default:
        break;
      }
    }
   
    if (egc_was_enabled) {
      egc_control(true, NULL);
    }
    break;
   
  }

  adjust_exception_pc(xp,4);
  return true;
}
586
587
588
void
signal_stack_soft_overflow(ExceptionInformation *xp, unsigned reg)
{
  /* The cstack just overflowed.  Force the current thread's
     control stack to do so until all stacks are well under their overflow
     limits.
  */

#if 0
  lisp_global(CS_OVERFLOW_LIMIT) = CS_OVERFLOW_FORCE_LIMIT; /* force unsigned traps to fail */
#endif
  /* Signal a lisp stack-overflow error, identifying the stack by its
     register number. */
  handle_error(xp, error_stack_overflow, reg, NULL);
}
602
603/*
604  Lower (move toward 0) the "end" of the soft protected area associated
605  with a by a page, if we can.
606*/
607
608void
609adjust_soft_protection_limit(area *a)
610{
611  char *proposed_new_soft_limit = a->softlimit - 4096;
612  protected_area_ptr p = a->softprot;
613 
614  if (proposed_new_soft_limit >= (p->start+16384)) {
615    p->end = proposed_new_soft_limit;
616    p->protsize = p->end-p->start;
617    a->softlimit = proposed_new_soft_limit;
618  }
619  protect_area(p);
620}
621
/* Re-establish the soft protection limit for the stack named by
   stkreg (Rsp = control stack, vsp = value stack) after an overflow
   has been handled. */
void
restore_soft_stack_limit(unsigned stkreg)
{
  area *a;
  TCR *tcr = get_tcr(true);

  switch (stkreg) {
  case Rsp:
    a = tcr->cs_area;
    /* Lower the soft limit one page at a time, keeping a 16KB cushion
       above the hard limit. */
    if ((a->softlimit - 4096) > (a->hardlimit + 16384)) {
      a->softlimit -= 4096;
    }
    tcr->cs_limit = (LispObj)ptr_to_lispobj(a->softlimit);
    break;
  case vsp:
    a = tcr->vs_area;
    adjust_soft_protection_limit(a);
    break;
  }
}
642
643/* Maybe this'll work someday.  We may have to do something to
644   make the thread look like it's not handling an exception */
/* Maybe this'll work someday.  We may have to do something to
   make the thread look like it's not handling an exception.
   Currently a no-op: the entire body is compiled out. */
void
reset_lisp_process(ExceptionInformation *xp)
{
#if 0
  TCR *tcr = get_tcr(true);
  catch_frame *last_catch = (catch_frame *) ptr_from_lispobj(untag(tcr->catch_top));

  tcr->save_allocptr = (void *) ptr_from_lispobj(xpGPR(xp, allocptr));

  tcr->save_vsp = (LispObj *) ptr_from_lispobj(((lisp_frame *)ptr_from_lispobj(last_catch->csp))->savevsp);

  start_lisp(tcr, 1);
#endif
}
659
660
661
662void
663platform_new_heap_segment(ExceptionInformation *xp, TCR *tcr, BytePtr low, BytePtr high)
664{
665  tcr->last_allocptr = (void *)high;
666  xpGPR(xp,allocptr) = (LispObj) high;
667  tcr->save_allocbase = (void*) low;
668}
669 
670void
671update_area_active (area **aptr, BytePtr value)
672{
673  area *a = *aptr;
674  for (; a; a = a->older) {
675    if ((a->low <= value) && (a->high >= value)) break;
676  };
677  if (a == NULL) Bug(NULL, "Can't find active area");
678  a->active = value;
679  *aptr = a;
680
681  for (a = a->younger; a; a = a->younger) {
682    a->active = a->high;
683  }
684}
685
686LispObj *
687tcr_frame_ptr(TCR *tcr)
688{
689  ExceptionInformation *xp;
690  LispObj *bp = NULL;
691
692  if (tcr->pending_exception_context)
693    xp = tcr->pending_exception_context;
694  else {
695    xp = tcr->suspend_context;
696  }
697  if (xp) {
698    bp = (LispObj *) xpGPR(xp, Rsp);
699  }
700  return bp;
701}
702
/*
  Bring tcr's recorded state into a consistent ("normalized") form:
  update the active pointers of its stack areas, account for bytes
  allocated in its current segment, and reset its allocation pointers.
  xp is the context to consult (NULL means the thread is in a foreign
  function call); is_other_tcr is true when normalizing a thread other
  than the one doing the normalizing.
*/
void
normalize_tcr(ExceptionInformation *xp, TCR *tcr, Boolean is_other_tcr)

{
  void *cur_allocptr = NULL;
  LispObj freeptr = 0;

  if (xp) {
    if (is_other_tcr) {
      /* Fix up the other thread's context first; only an untagged
         allocptr describes a real segment position. */
      pc_luser_xp(xp, tcr, NULL);
      freeptr = xpGPR(xp, allocptr);
      if (fulltag_of(freeptr) == 0){
        cur_allocptr = (void *) ptr_from_lispobj(freeptr);
      }
    }
    update_area_active((area **)&tcr->cs_area, (BytePtr) ptr_from_lispobj(xpGPR(xp, Rsp)));
    update_area_active((area **)&tcr->vs_area, (BytePtr) ptr_from_lispobj(xpGPR(xp, vsp)));
  } else {
    /* In ff-call. */
    cur_allocptr = (void *) (tcr->save_allocptr);
    update_area_active((area **)&tcr->vs_area, (BytePtr) tcr->save_vsp);
    update_area_active((area **)&tcr->cs_area, (BytePtr) tcr->last_lisp_frame);
  }
#if 0
    fprintf(dbgout,"tcr = 0x%x, last lisp frame = 0x%x\n", (natural) tcr , (natural) (tcr->last_lisp_frame));

#endif
  tcr->save_allocptr = tcr->save_allocbase = (void *)VOID_ALLOCPTR;
  if (cur_allocptr) {
    update_bytes_allocated(tcr, cur_allocptr);
    if (freeptr) {
      xpGPR(xp, allocptr) = VOID_ALLOCPTR;
    }
  }
}
738
/* TCR of the thread currently running a gc-like operation, or NULL. */
TCR *gc_tcr = NULL;
740
741/* Suspend and "normalize" other tcrs, then call a gc-like function
742   in that context.  Resume the other tcrs, then return what the
743   function returned */
744
signed_natural
gc_like_from_xp(ExceptionInformation *xp, 
                signed_natural(*fun)(TCR *, signed_natural), 
                signed_natural param)
{

  TCR *tcr = get_tcr(true), *other_tcr;
  int result;
  signed_natural inhibit;

  suspend_other_threads(true);
  /* If GC is inhibited, record the deferral and bail out.  A positive
     count is negated — apparently to flag that a GC was requested
     while inhibited; confirm against the GC_INHIBIT handling in the
     GC proper. */
  inhibit = (signed_natural)(lisp_global(GC_INHIBIT_COUNT));
  if (inhibit != 0) {
    if (inhibit > 0) {
      lisp_global(GC_INHIBIT_COUNT) = (LispObj)(-inhibit);
    }
    resume_other_threads(true);
    gc_deferred++;
    return 0;
  }
  gc_deferred = 0;

  gc_tcr = tcr;

  xpGPR(xp, allocptr) = VOID_ALLOCPTR;

  normalize_tcr(xp, tcr, false);


  /* Pick the most relevant context for each other thread and
     normalize its state for the GC. */
  for (other_tcr = tcr->next; other_tcr != tcr; other_tcr = other_tcr->next) {
    if (other_tcr->pending_exception_context) {
      other_tcr->gc_context = other_tcr->pending_exception_context;
    } else if (other_tcr->valence == TCR_STATE_LISP) {
      other_tcr->gc_context = other_tcr->suspend_context;
    } else {
      /* no pending exception, didn't suspend in lisp state:
         must have executed a synchronous ff-call.
      */
      other_tcr->gc_context = NULL;
    }
    normalize_tcr(other_tcr->gc_context, other_tcr, true);
  }
   


  result = fun(tcr, param);

  /* Clear the per-thread gc contexts before resuming anyone. */
  other_tcr = tcr;
  do {
    other_tcr->gc_context = NULL;
    other_tcr = other_tcr->next;
  } while (other_tcr != tcr);

  gc_tcr = NULL;

  resume_other_threads(true);

  return result;

}
805
806
807
808/* Returns #bytes freed by invoking GC */
809
810signed_natural
811gc_from_tcr(TCR *tcr, signed_natural param)
812{
813  area *a;
814  BytePtr oldfree, newfree;
815  BytePtr oldend, newend;
816
817  a = active_dynamic_area;
818  oldend = a->high;
819  oldfree = a->active;
820  gc(tcr, param);
821  newfree = a->active;
822  newend = a->high;
823#if 0
824  fprintf(dbgout, "End GC  in 0x%lx\n", tcr);
825#endif
826  return ((oldfree-newfree)+(newend-oldend));
827}
828
829signed_natural
830gc_from_xp(ExceptionInformation *xp, signed_natural param)
831{
832  signed_natural status = gc_like_from_xp(xp, gc_from_tcr, param);
833
834  freeGCptrs();
835  return status;
836}
837
838signed_natural
839purify_from_xp(ExceptionInformation *xp, signed_natural param)
840{
841  return gc_like_from_xp(xp, purify, param);
842}
843
844signed_natural
845impurify_from_xp(ExceptionInformation *xp, signed_natural param)
846{
847  return gc_like_from_xp(xp, impurify, param);
848}
849
850
851
852
853
854
/* Dispatch table for write faults on protected areas, indexed by the
   protected_area's `why` field.  NOTE(review): the entry order must
   match the guard-kind enum (spurious, three soft guards, three hard
   guards) — confirm against the protected-area definitions. */
protection_handler
 * protection_handlers[] = {
   do_spurious_wp_fault,
   do_soft_stack_overflow,
   do_soft_stack_overflow,
   do_soft_stack_overflow,
   do_hard_stack_overflow,   
   do_hard_stack_overflow,
   do_hard_stack_overflow
   };
865
866
867Boolean
868is_write_fault(ExceptionInformation *xp, siginfo_t *info)
869{
870  return ((xpFaultStatus(xp) & 0x800) != 0);
871 }
872
/*
  Handle SIGSEGV/SIGBUS: safe-reference probes, touch_page probes,
  writes to guard pages or the readonly section, and — failing all of
  those — a callback to lisp for faults raised in lisp code.  Returns
  true iff the fault was handled.
*/
Boolean
handle_protection_violation(ExceptionInformation *xp, siginfo_t *info, TCR *tcr, int old_valence)
{
  BytePtr addr;
  protected_area_ptr area;
  protection_handler *handler;
  extern Boolean touch_page(void *);
  extern void touch_page_end(void);

#ifdef LINUX
  addr = (BytePtr) ((natural) (xpFaultAddress(xp)));
#else
  if (info) {
    addr = (BytePtr)(info->si_addr);
  } else {
    addr = (BytePtr) ((natural) (xpFaultAddress(xp)));
  }
#endif

  /* A fault at tcr->safe_ref_address is the expected failure of a
     "safe" memory probe: report 0 in imm0 and skip the instruction. */
  if (addr && (addr == tcr->safe_ref_address)) {
    adjust_exception_pc(xp,4);

    xpGPR(xp,imm0) = 0;
    return true;
  }

  /* A fault at the start of touch_page() means the probed page isn't
     writable: report failure and jump to touch_page_end. */
  if (xpPC(xp) == (pc)touch_page) {
    xpGPR(xp,imm0) = 0;
    xpPC(xp) = (pc)touch_page_end;
    return true;
  }


  if (is_write_fault(xp,info)) {
    area = find_protected_area(addr);
    if (area != NULL) {
      /* Dispatch on why the area is protected. */
      handler = protection_handlers[area->why];
      return handler(xp, area, addr);
    } else {
      if ((addr >= readonly_area->low) &&
          (addr < readonly_area->active)) {
        /* Write into the readonly section: quietly make the page
           writable. */
        UnProtectMemory((LogicalAddress)(truncate_to_power_of_2(addr,log2_page_size)),
                        page_size);
        return true;
      }
    }
  }
  if (old_valence == TCR_STATE_LISP) {
    /* Unrecognized fault while running lisp: let the lisp callback
       (CMAIN), if installed, turn it into a lisp error. */
    LispObj cmain = nrs_CMAIN.vcell;
   
    if ((fulltag_of(cmain) == fulltag_misc) &&
      (header_subtag(header_of(cmain)) == subtag_macptr)) {
     
      callback_for_trap(nrs_CMAIN.vcell, xp, is_write_fault(xp,info)?SIGBUS:SIGSEGV, (natural)addr, NULL);
    }
  }
  return false;
}
931
932
933
934
935
936OSStatus
937do_hard_stack_overflow(ExceptionInformation *xp, protected_area_ptr area, BytePtr addr)
938{
939#ifdef SUPPORT_PRAGMA_UNUSED
940#pragma unused(area,addr)
941#endif
942  reset_lisp_process(xp);
943  return -1;
944}
945
946extern area*
947allocate_vstack(natural useable);       /* This is in "pmcl-kernel.c" */
948
949extern area*
950allocate_tstack(natural useable);       /* This is in "pmcl-kernel.c" */
951
952
953
954
955
956
957Boolean
958lisp_frame_p(lisp_frame *spPtr)
959{
960  return (spPtr->marker == lisp_frame_marker);
961}
962
963
964int ffcall_overflow_count = 0;
965
966
967
968
969
970
971/* Note: CURRENT_VS (CURRENT_TS) is always either the area containing
972  the current value of VSP (TSP) or an older area.  */
973
974OSStatus
975do_vsp_overflow (ExceptionInformation *xp, BytePtr addr)
976{
977  TCR* tcr = get_tcr(true);
978  area *a = tcr->vs_area;
979  protected_area_ptr vsp_soft = a->softprot;
980  unprotect_area(vsp_soft);
981  signal_stack_soft_overflow(xp,vsp);
982  return 0;
983}
984
985
986
987OSStatus
988do_soft_stack_overflow(ExceptionInformation *xp, protected_area_ptr prot_area, BytePtr addr)
989{
990  /* Trying to write into a guard page on the vstack or tstack.
991     Allocate a new stack segment, emulate stwu and stwux for the TSP, and
992     signal an error_stack_overflow condition.
993      */
994  if (prot_area->why == kVSPsoftguard) {
995    return do_vsp_overflow(xp,addr);
996  }
997  unprotect_area(prot_area);
998  signal_stack_soft_overflow(xp,Rsp);
999  return 0;
1000}
1001
1002OSStatus
1003do_spurious_wp_fault(ExceptionInformation *xp, protected_area_ptr area, BytePtr addr)
1004{
1005#ifdef SUPPORT_PRAGMA_UNUSED
1006#pragma unused(xp,area,addr)
1007#endif
1008  return -1;
1009}
1010
1011
1012
1013
1014     
1015
1016
1017
1018
1019
1020Boolean
1021handle_sigfpe(ExceptionInformation *xp, TCR *tcr)
1022{
1023  return false;
1024}
1025
1026
1027Boolean
1028handle_unimplemented_instruction(ExceptionInformation *xp,
1029                                 opcode instruction,
1030                                 TCR *tcr)
1031{
1032
1033  return false;
1034}
1035
1036Boolean
1037handle_exception(int xnum, 
1038                 ExceptionInformation *xp, 
1039                 TCR *tcr, 
1040                 siginfo_t *info,
1041                 int old_valence)
1042{
1043  pc program_counter;
1044  opcode instruction = 0;
1045  if (tcr->flags & (1<<TCR_FLAG_BIT_PC_LUSERED)) {
1046    tcr->flags &= ~(1<<TCR_FLAG_BIT_PC_LUSERED);
1047    return true;
1048  }
1049  if (old_valence != TCR_STATE_LISP) {
1050    return false;
1051  }
1052
1053  program_counter = xpPC(xp);
1054 
1055  if ((xnum == SIGILL) | (xnum == SIGTRAP)) {
1056    instruction = *program_counter;
1057  }
1058
1059  if (IS_ALLOC_TRAP(instruction)) {
1060    Boolean did_notify = false,
1061      *notify_ptr = &did_notify;
1062    if (did_gc_notification_since_last_full_gc) {
1063      notify_ptr = NULL;
1064    }
1065    return handle_alloc_trap(xp, tcr, notify_ptr);
1066  } else if ((xnum == SIGSEGV) ||
1067             (xnum == SIGBUS)) {
1068    return handle_protection_violation(xp, info, tcr, old_valence);
1069  } else if (xnum == SIGFPE) {
1070    return handle_sigfpe(xp, tcr);
1071  } else if ((xnum == SIGILL)) {
1072    if (IS_GC_TRAP(instruction)) {
1073      return handle_gc_trap(xp, tcr);
1074    } else if (IS_UUO(instruction)) {
1075      return handle_uuo(xp, info, instruction);
1076    } else {
1077      return handle_unimplemented_instruction(xp,instruction,tcr);
1078    }
1079  } else if (xnum == SIGNAL_FOR_PROCESS_INTERRUPT) {
1080    tcr->interrupt_pending = 0;
1081    callback_for_trap(nrs_CMAIN.vcell, xp, 0, 0, NULL);
1082    return true;
1083  }
1084
1085  return false;
1086}
1087
void
adjust_exception_pc(ExceptionInformation *xp, int delta)
{
  /* `delta` is a byte count; the PC is a pointer to 32-bit opcodes,
     so convert bytes to words before adjusting. */
  xpPC(xp) += (delta >> 2);
}
1093
1094
1095/*
1096  This wants to scan backwards until "where" points to an instruction
1097   whose major opcode is either 63 (double-float) or 59 (single-float)
1098*/
1099
1100OSStatus
1101handle_fpux_binop(ExceptionInformation *xp, pc where)
1102{
1103  OSStatus err = -1;
1104  opcode *there = (opcode *) where, instr, errnum = 0;
1105  return err;
1106}
1107
/*
 * Dispatch on the format field of a UUO (an architecturally-undefined
 * instruction which the lisp runtime uses to request kernel services,
 * signal lisp errors, enter the debugger, etc.)
 *
 * Returns true if the UUO was recognized and handled.  On a handled
 * UUO the saved PC is advanced by "bump" bytes (normally 4, skipping
 * the UUO itself) before returning.
 */
Boolean
handle_uuo(ExceptionInformation *xp, siginfo_t *info, opcode the_uuo) 
{
  unsigned 
    format = UUO_FORMAT(the_uuo);
  Boolean handled = false;
  int bump = 4;                 /* bytes to advance PC if handled */
  TCR *tcr = get_tcr(true);

  switch (format) {
  case uuo_format_kernel_service:
    {
      /* Thread-control services; arg_z holds the target thread's TCR
         (as a tagged pointer) where a target is needed.  Results are
         returned to lisp in imm0. */
      TCR * target = (TCR *)xpGPR(xp,arg_z);
      int service = UUO_UNARY_field(the_uuo);

      switch (service) {
      case error_propagate_suspend:
        /* Nothing to do here; the act of taking the exception is
           sufficient. */
        handled = true;
        break;
      case error_interrupt:
        xpGPR(xp,imm0) = (LispObj) raise_thread_interrupt(target);
        handled = true;
        break;
      case error_suspend:
        xpGPR(xp,imm0) = (LispObj) lisp_suspend_tcr(target);
        handled = true;
        break;
      case error_suspend_all:
        lisp_suspend_other_threads();
        handled = true;
        break;
      case error_resume:
        xpGPR(xp,imm0) = (LispObj) lisp_resume_tcr(target);
        handled = true;
        break;
      case error_resume_all:
        lisp_resume_other_threads();
        handled = true;
        break;
      case error_kill:
        xpGPR(xp,imm0) = (LispObj)kill_tcr(target);
        handled = true;
        break;
      case error_allocate_list:
        allocate_list(xp,tcr);
        handled = true;
        break;
      default:
        handled = false;
        break;
      }
      break;
    }

  case uuo_format_unary:
    switch(UUO_UNARY_field(the_uuo)) {
    case 3:
      /* Thread-local-binding index out of bounds: grow the tlb and
         retry by re-executing nothing (PC is bumped past the UUO). */
      if (extend_tcr_tlb(tcr,xp,UUOA_field(the_uuo))) {
        handled = true;
        bump = 4;
        break;
      }
      /* fall in */
    default:
      handled = false;
      break;

    }
    break;

  case uuo_format_nullary:
    switch (UUOA_field(the_uuo)) {
    case 3:
      /* Breakpoint: bump the PC first so the debugger sees (and
         resumes at) the instruction after the UUO. */
      adjust_exception_pc(xp, bump);
      bump = 0;
      lisp_Debugger(xp, info, debug_entry_dbg, false, "Lisp Breakpoint");
      handled = true;
      break;

    case 4:
      /* Deferred process-interrupt: deliver it via CMAIN. */
      tcr->interrupt_pending = 0;
      callback_for_trap(nrs_CMAIN.vcell, xp, 0, 0, NULL);
      handled = true;
      break;
    default:
      handled = false;
      break;
    }
    break;


  /* All of the error formats are handled by calling back to lisp
     through ERRDISP; the callback decides how far to bump the PC. */
  case uuo_format_error_lisptag:
  case uuo_format_error_fulltag:
  case uuo_format_error_xtype:
  case uuo_format_cerror_lisptag:
  case uuo_format_cerror_fulltag:
  case uuo_format_cerror_xtype:
  case uuo_format_nullary_error:
  case uuo_format_unary_error:
  case uuo_format_binary_error:
  case uuo_format_ternary:
  case uuo_format_ternary2:
    handled = handle_error(xp,0,the_uuo, &bump);
    break;

  default:
    handled = false;
    bump = 0;
  }
 
  if (handled && bump) {
    adjust_exception_pc(xp, bump);
  }
  return handled;
}
1223
1224natural
1225register_codevector_contains_pc (natural lisp_function, pc where)
1226{
1227  natural code_vector, size;
1228
1229  if ((fulltag_of(lisp_function) == fulltag_misc) &&
1230      (header_subtag(header_of(lisp_function)) == subtag_function)) {
1231    code_vector = deref(lisp_function, 2);
1232    size = header_element_count(header_of(code_vector)) << 2;
1233    if ((untag(code_vector) < (natural)where) && 
1234        ((natural)where < (code_vector + size)))
1235      return(code_vector);
1236  }
1237
1238  return(0);
1239}
1240
/*
 * Thin alias for callback_to_lisp(), kept for the benefit of callers
 * that conceptually handle traps.  See callback_to_lisp() for the
 * argument and *bumpP semantics.
 */
Boolean
callback_for_trap (LispObj callback_macptr, ExceptionInformation *xp, natural info,natural arg, int *bumpP)
{
  return callback_to_lisp(callback_macptr, xp, info,arg, bumpP);
}
1246
/*
 * Call back into lisp through the macptr "callback_macptr" to handle
 * an exception.  Identifies the register (Rfn or nfn) holding the
 * function whose code vector contains the faulting PC, so lisp can
 * report <function, offset>; if neither matches, fnreg is 0 and the
 * raw PC is passed as the offset.
 *
 * The exception lock is released around the actual callback and
 * re-acquired afterwards.  The callback's return value (a byte
 * displacement for the PC) is stored through bumpP if non-NULL.
 * Always returns true.
 */
Boolean
callback_to_lisp (LispObj callback_macptr, ExceptionInformation *xp,
                  natural arg1, natural arg2, int *bumpP)
{
  natural  callback_ptr;
  area *a;
  natural fnreg = Rfn,  codevector, offset;
  pc where = xpPC(xp);
  int delta;

  /* Figure out which function register (if either) the PC lies in. */
  codevector = register_codevector_contains_pc(xpGPR(xp,fnreg), where);
  if (codevector == 0) {
    fnreg = nfn;
    codevector = register_codevector_contains_pc(xpGPR(xp,fnreg), where);
    if (codevector == 0) {
      fnreg = 0;
    }
  }
  if (codevector) {
    /* Offset of the PC within the (tagged) function's code. */
    offset = (natural)where - (codevector - (fulltag_misc-node_size));
  } else {
    offset = (natural)where;
  }



  TCR *tcr = get_tcr(true);

  /* Put the active stack pointer where .SPcallback expects it */
  a = tcr->cs_area;
  a->active = (BytePtr) ptr_from_lispobj(xpGPR(xp, Rsp));

  /* Copy globals from the exception frame to tcr */
  tcr->save_allocptr = (void *)ptr_from_lispobj(xpGPR(xp, allocptr));
  tcr->save_vsp = (LispObj*) ptr_from_lispobj(xpGPR(xp, vsp));



  /* Call back.
     Lisp will handle trampolining through some code that
     will push lr/fn & pc/nfn stack frames for backtrace.
  */
  callback_ptr = ((macptr *)ptr_from_lispobj(untag(callback_macptr)))->address;
  UNLOCK(lisp_global(EXCEPTION_LOCK), tcr);
  delta = ((int (*)())callback_ptr) (xp, arg1, arg2, fnreg, offset);
  LOCK(lisp_global(EXCEPTION_LOCK), tcr);

  if (bumpP) {
    *bumpP = delta;
  }

  /* Copy GC registers back into exception frame */
  xpGPR(xp, allocptr) = (LispObj) ptr_to_lispobj(tcr->save_allocptr);
  return true;
}
1302
1303area *
1304allocate_no_stack (natural size)
1305{
1306#ifdef SUPPORT_PRAGMA_UNUSED
1307#pragma unused(size)
1308#endif
1309
1310  return (area *) NULL;
1311}
1312
1313
1314
1315
1316
1317
1318/* callback to (symbol-value cmain) if it is a macptr,
1319   otherwise report cause and function name to console.
1320   Returns noErr if exception handled OK */
1321OSStatus
1322handle_trap(ExceptionInformation *xp, opcode the_trap, pc where, siginfo_t *info)
1323{
1324  LispObj   cmain = nrs_CMAIN.vcell;
1325  TCR *tcr = TCR_FROM_TSD(xpGPR(xp, rcontext));
1326
1327}
1328
1329
1330
1331
1332void non_fatal_error( char *msg )
1333{
1334  fprintf( dbgout, "Non-fatal error: %s.\n", msg );
1335  fflush( dbgout );
1336}
1337
1338
1339
1340Boolean
1341handle_error(ExceptionInformation *xp, unsigned arg1, unsigned arg2, int *bumpP)
1342{
1343  LispObj   errdisp = nrs_ERRDISP.vcell;
1344
1345  if ((fulltag_of(errdisp) == fulltag_misc) &&
1346      (header_subtag(header_of(errdisp)) == subtag_macptr)) {
1347    /* errdisp is a macptr, we can call back to lisp */
1348    return callback_for_trap(errdisp, xp, arg1, arg2, bumpP);
1349    }
1350
1351  return false;
1352}
1353               
1354
1355/*
1356   Current thread has all signals masked.  Before unmasking them,
1357   make it appear that the current thread has been suspended.
1358   (This is to handle the case where another thread is trying
   to GC before this thread is able to seize the exception lock.)
1360*/
/*
 * See block comment above: publish this thread's exception context
 * and mark it as waiting, so a suspending/GCing thread can treat it
 * as already suspended.  Returns the previous valence so the caller
 * can restore it on exit.
 */
int
prepare_to_wait_for_exception_lock(TCR *tcr, ExceptionInformation *context)
{
  int old_valence = tcr->valence;

  /* Publish the context *before* changing valence; other threads key
     off the valence to decide whether the context is usable. */
  tcr->pending_exception_context = context;
  tcr->valence = TCR_STATE_EXCEPTION_WAIT;

  /* Only now is it safe to unmask signals in the interrupted context. */
  ALLOW_EXCEPTIONS(context);
  return old_valence;
}
1372
/*
 * Block until this thread owns the global exception lock, then push
 * "context" onto the TCR's xframe list (so the GC can find and update
 * the interrupted registers) and mark the thread as running foreign
 * (kernel) code.
 */
void
wait_for_exception_lock_in_handler(TCR *tcr, 
                                   ExceptionInformation *context,
                                   xframe_list *xf)
{

  LOCK(lisp_global(EXCEPTION_LOCK), tcr);
  /* Link the caller-supplied xframe node onto the TCR's chain. */
  xf->curr = context;
  xf->prev = tcr->xframe;
  tcr->xframe =  xf;
  /* The context is now reachable via the xframe list; clear the
     pending slot and run the handler as "foreign" code. */
  tcr->pending_exception_context = NULL;
  tcr->valence = TCR_STATE_FOREIGN; 
}
1386
/*
 * Inverse of wait_for_exception_lock_in_handler(): pop the xframe
 * list, re-publish the context as pending, mark the thread as
 * returning from an exception, and release the exception lock.
 */
void
unlock_exception_lock_in_handler(TCR *tcr)
{
  tcr->pending_exception_context = tcr->xframe->curr;
  tcr->xframe = tcr->xframe->prev;
  tcr->valence = TCR_STATE_EXCEPTION_RETURN;
  UNLOCK(lisp_global(EXCEPTION_LOCK),tcr);
}
1395
1396/*
1397   If an interrupt is pending on exception exit, try to ensure
1398   that the thread sees it as soon as it's able to run.
1399*/
/*
 * See block comment above: if interrupts are enabled (positive
 * interrupt level), re-signal the thread so that a deferred
 * process-interrupt is delivered as soon as it can run.
 */
void
raise_pending_interrupt(TCR *tcr)
{
  if (TCR_INTERRUPT_LEVEL(tcr) > 0) {
    pthread_kill((pthread_t)ptr_from_lispobj(tcr->osid), SIGNAL_FOR_PROCESS_INTERRUPT);
  }
}
1407
/*
 * Common epilogue for signal handlers: block all signals, restore the
 * thread's previous valence and last-lisp-frame, and clear the
 * pending exception context.  Signals are re-unmasked by the normal
 * sigreturn path when the handler actually returns.
 */
void
exit_signal_handler(TCR *tcr, int old_valence, natural old_last_lisp_frame)
{
#ifndef ANDROID
  sigset_t mask;
  sigfillset(&mask);
#else
  /* Bionic lacks a usable sigfillset here; an all-ones bitmask stands
     in for a full sigset_t (see the rt_sigprocmask wrapper above). */
  int mask [] = {-1,-1};
#endif
 
  pthread_sigmask(SIG_SETMASK,(sigset_t *)&mask, NULL);
  tcr->valence = old_valence;
  tcr->pending_exception_context = NULL;
  tcr->last_lisp_frame = old_last_lisp_frame;
}
1423
1424
/*
 * Main synchronous-exception handler (SIGILL/SIGSEGV/SIGBUS paths).
 * On Darwin the Mach exception machinery has already computed tcr /
 * old_valence / old_last_lisp_frame and passes them in; elsewhere we
 * derive them here.  Acquires the exception lock, dispatches to
 * handle_exception(), and enters the kernel debugger if the exception
 * was not handled.
 */
void
signal_handler(int signum, siginfo_t *info, ExceptionInformation  *context
#ifdef DARWIN
, TCR *tcr, int old_valence, natural old_last_lisp_frame
#endif
)
{
  xframe_list xframe_link;
#ifndef DARWIN

    TCR *tcr = (TCR *) get_interrupt_tcr(false);

    /* The signal handler's entered with all signals (notably the
       thread_suspend signal) blocked.  Don't allow any other signals
       (notably the thread_suspend signal) to preempt us until we've
       set the TCR's xframe slot to include the current exception
       context.
    */

    natural  old_last_lisp_frame = tcr->last_lisp_frame;
    int old_valence;

    /* If pc_luser_xp() already adjusted this thread's context, the
       exception we're now seeing is stale: consume the flag and
       ignore the signal.  (See the checkin comment for ticket:1257.) */
    if (tcr->flags & (1<<TCR_FLAG_BIT_PC_LUSERED)) {
      tcr->flags &= ~(1<<TCR_FLAG_BIT_PC_LUSERED);
      return;
    }
    tcr->last_lisp_frame = xpGPR(context,Rsp);
    old_valence = prepare_to_wait_for_exception_lock(tcr, context);
#endif

  /* A suspend request arrived while suspends were deferred: deliver
     it to ourselves now that it's safe. */
  if (tcr->flags & (1<<TCR_FLAG_BIT_PENDING_SUSPEND)) {
    CLR_TCR_FLAG(tcr, TCR_FLAG_BIT_PENDING_SUSPEND);
    pthread_kill(pthread_self(), thread_suspend_signal);
  }


  wait_for_exception_lock_in_handler(tcr, context, &xframe_link);
  if ((!handle_exception(signum, context, tcr, info, old_valence))) {
    char msg[512];
    snprintf(msg, sizeof(msg), "Unhandled exception %d at 0x%lx, context->regs at #x%lx", signum, xpPC(context), (natural)xpGPRvector(context));
    if (lisp_Debugger(context, info, signum, (old_valence != TCR_STATE_LISP), msg)) {
      SET_TCR_FLAG(tcr,TCR_FLAG_BIT_PROPAGATE_EXCEPTION);
    }
  }
  unlock_exception_lock_in_handler(tcr);

  /* This thread now looks like a thread that was suspended while
     executing lisp code.  If some other thread gets the exception
     lock and GCs, the context (this thread's suspend_context) will
     be updated.  (That's only of concern if it happens before we
     can return to the kernel/to the Mach exception handler).
  */
  exit_signal_handler(tcr, old_valence, old_last_lisp_frame);
  raise_pending_interrupt(tcr);
}
1480
1481
/*
 * SIGILL handler.  UUOs are emitted with an ARM condition field; if
 * the faulting instruction is a UUO whose condition is NOT satisfied
 * by the saved PSR flags, the UUO is a no-op: just skip it.
 * Otherwise fall through to the full signal_handler().
 *
 * The switch decodes bits 31:29 as a condition pair selector and bit
 * 28 as the "inverted" member of the pair (EQ/NE, CS/CC, MI/PL,
 * VS/VC, HI/LS, GE/LT, GT/LE, AL) — NOTE(review): this mirrors the
 * ARM condition-code table; confirm against the UUO encoding in the
 * compiler backend.
 */
void
sigill_handler(int signum, siginfo_t *info, ExceptionInformation  *xp)
{
  pc program_counter = xpPC(xp);
  opcode instr = *program_counter;

  if (IS_UUO(instr)) {
    natural psr = xpPSR(xp);
    Boolean opcode_matched_condition = false,
      flip = ((instr & (1<<28)) != 0);   /* bit 28 negates the test */


    switch (instr >> 29) {
    case 0:                     /* EQ / NE */
      opcode_matched_condition = ((psr & PSR_Z_MASK) != 0);
      break;
    case 1:                     /* CS / CC */
      opcode_matched_condition = ((psr & PSR_C_MASK) != 0);
      break;
    case 2:                     /* MI / PL */
      opcode_matched_condition = ((psr & PSR_N_MASK) != 0);
      break;
    case 3:                     /* VS / VC */
      opcode_matched_condition = ((psr & PSR_V_MASK) != 0);
      break;
    case 4:                     /* HI / LS: C set and Z clear */
      opcode_matched_condition = (((psr & PSR_C_MASK) != 0) &&
                                  ((psr & PSR_Z_MASK) == 0));
      break;
    case 5:                     /* GE / LT: N == V */
      opcode_matched_condition = (((psr & PSR_N_MASK) != 0) ==
                                  ((psr & PSR_V_MASK) != 0));
      break;
    case 6:                     /* GT / LE: (N == V) and Z clear */
      opcode_matched_condition = ((((psr & PSR_N_MASK) != 0) ==
                                   ((psr & PSR_V_MASK) != 0)) &&
                                  ((psr & PSR_Z_MASK) == 0));
      break;
    case 7:                     /* AL: always, never inverted */
      opcode_matched_condition = true;
      flip = false;
      break;
    }
    if (flip) {
      opcode_matched_condition = !opcode_matched_condition;
    }
    if (!opcode_matched_condition) {
      /* Condition false: the UUO executes as a no-op; skip it. */
      adjust_exception_pc(xp,4);
      return;
    }
  }
  signal_handler(signum,info,xp);
}
1535
1536
1537#ifdef USE_SIGALTSTACK
/*
 * Running on the sigaltstack: copy the ucontext and siginfo onto the
 * interrupted (main) lisp stack, just below its Rsp, then transfer
 * control there via the assembly trampoline so that "handler" runs on
 * the main stack.  Handlers that can GC must not run on the small
 * alternate stack.
 */
void
invoke_handler_on_main_stack(int signo, siginfo_t *info, ExceptionInformation *xp, void *return_address, void *handler)
{
  ExceptionInformation *xp_copy;
  siginfo_t *info_copy;
  extern void call_handler_on_main_stack(int, siginfo_t *, ExceptionInformation *,void *, void *);
 
  BytePtr target_sp= (BytePtr)xpGPR(xp,Rsp);
  /* Reserve space for, then copy, the context below the saved sp. */
  target_sp -= sizeof(ucontext_t);
  xp_copy = (ExceptionInformation *)target_sp;
  memmove(target_sp,xp,sizeof(*xp));
  /* The copy is not the altstack context: scrub its stack linkage. */
  xp_copy->uc_stack.ss_sp = 0;
  xp_copy->uc_stack.ss_size = 0;
  xp_copy->uc_stack.ss_flags = 0;
  xp_copy->uc_link = NULL;
  target_sp -= sizeof(siginfo_t);
  info_copy = (siginfo_t *)target_sp;
  memmove(target_sp,info,sizeof(*info));
  call_handler_on_main_stack(signo, info_copy, xp_copy, return_address, handler);
}
1558 
/*
 * First-level SIGSEGV/SIGBUS handler, run on the alternate stack.
 * Handles control-stack growth/overflow in place (so that the common
 * case never leaves the altstack), then forwards everything else to
 * signal_handler() on the main stack.
 */
void
altstack_signal_handler(int signo, siginfo_t *info, ExceptionInformation *xp)
{
  TCR *tcr=get_tcr(true);
 
  if (signo == SIGBUS) {
    BytePtr addr = (BytePtr)(xp->uc_mcontext.fault_address); 
    area *a = tcr->cs_area;
    /* Fault exactly at the soft limit page: try to map it in and
       retry the faulting access. */
    if (((BytePtr)truncate_to_power_of_2(addr,log2_page_size))== a->softlimit) 
{
      if (mmap(a->softlimit,
               page_size,
               PROT_READ|PROT_WRITE|PROT_EXEC,
               MAP_PRIVATE|MAP_ANON|MAP_FIXED,
               -1,
               0) == a->softlimit) {
        return;
      }
    }
  } else if (signo == SIGSEGV) {
    BytePtr addr = (BytePtr)(xp->uc_mcontext.fault_address);
    area *a = tcr->cs_area;
   
    /* Fault between the guard pages: hard overflow is fatal, soft
       overflow unprotects the guard region and lets lisp signal a
       storage condition. */
    if ((addr >= a->low) &&
        (addr < a->softlimit)) {
      if (addr < a->hardlimit) {
        Bug(xp, "hard stack overflow");
      } else {
        UnProtectMemory(a->hardlimit,a->softlimit-a->hardlimit);
      }
    }
  }
  invoke_handler_on_main_stack(signo, info, xp, __builtin_return_address(0), signal_handler);
}
1593#endif
1594
1595/*
1596  If it looks like we're in the middle of an atomic operation, make
1597  it seem as if that operation is either complete or hasn't started
1598  yet.
1599
1600  The cases handled include:
1601
1602  a) storing into a newly-allocated lisp frame on the stack.
1603  b) marking a newly-allocated TSP frame as containing "raw" data.
1604  c) consing: the GC has its own ideas about how this should be
1605     handled, but other callers would be best advised to back
1606     up or move forward, according to whether we're in the middle
1607     of allocating a cons cell or allocating a uvector.
1608  d) a STMW to the vsp
1609  e) EGC write-barrier subprims.
1610*/
1611
1612extern opcode
1613  egc_write_barrier_start,
1614  egc_write_barrier_end, 
1615  egc_store_node_conditional, 
1616  egc_store_node_conditional_test,
1617  egc_set_hash_key, egc_set_hash_key_did_store,
1618  egc_gvset, egc_gvset_did_store,
1619  egc_rplaca, egc_rplaca_did_store,
1620  egc_rplacd, egc_rplacd_did_store,
1621  egc_set_hash_key_conditional,
1622  egc_set_hash_key_conditional_test,
1623  swap_lr_lisp_frame_temp0,
1624  swap_lr_lisp_frame_arg_z,
1625  ffcall_funky,
1626  ffcall_funky_end;
1627
1628
1629extern opcode ffcall_return_window, ffcall_return_window_end;
1630
/*
 * Classify the instruction at the interrupted PC with respect to the
 * inline-allocation sequence (subtract from allocptr; load allocbase;
 * compare; conditional branch around the trap; the trap itself; then
 * the instructions that tag/initialize the new object).  Used by
 * pc_luser_xp() to decide whether to back an interrupted allocation
 * out or push it to completion.
 */
alloc_instruction_id  classify_alloc_instruction (ExceptionInformation *xp)
{
  pc program_counter = xpPC(xp);
  opcode instr = *program_counter;

  if (IS_SUB_FROM_ALLOCPTR(instr) ||
      IS_SUB_RM_FROM_ALLOCPTR(instr)) {
    return ID_adjust_allocptr_instruction;
  }
  if (IS_LOAD_RD_FROM_ALLOCBASE(instr)) {
    return ID_load_allocbase_instruction;
  }
  if (IS_COMPARE_ALLOCPTR_TO_RM(instr)) {
    return ID_compare_allocptr_to_allocbase_instruction;
  }
  if (IS_BRANCH_AROUND_ALLOC_TRAP(instr)) {
    return ID_branch_around_alloc_trap_instruction;
  }
  if (IS_ALLOC_TRAP(instr)) {
    return ID_alloc_trap_instruction;
  }
  /* Any of the tag/car/cdr/header/result stores means the object has
     been "won" and allocation just needs to be finished. */
  if (IS_CLR_ALLOCPTR_TAG(instr) ||
      IS_SET_ALLOCPTR_RESULT_RD(instr) ||
      IS_SET_ALLOCPTR_CDR_RD(instr) ||
      IS_SET_ALLOCPTR_CAR_RD(instr) ||
      IS_SET_ALLOCPTR_HEADER_RD(instr)) {
        return ID_finish_allocation;
      }
  return ID_unrecognized_alloc_instruction;
}
1661 
1662
1663void
1664restart_allocation(ExceptionInformation *xp)
1665{
1666  pc p = xpPC(xp);
1667  opcode instr = *p;
1668
1669  while (1) {
1670    if (IS_SUB_RM_FROM_ALLOCPTR(instr) ||
1671        IS_SUB_LO_FROM_ALLOCPTR(instr)) {
1672      xpPC(xp) = p;
1673      return;
1674    } else {
1675      --p;
1676      instr = *p;
1677    }
1678  }
1679}
1680extern pc _SPeabi_ff_call_simple, _SPdebind;
1681
1682void
1683pc_luser_xp(ExceptionInformation *xp, TCR *tcr, signed_natural *alloc_disp)
1684{
1685  pc program_counter = xpPC(xp);
1686  opcode instr = *program_counter;
1687  lisp_frame *frame = (lisp_frame *)ptr_from_lispobj(xpGPR(xp,Rsp));
1688  LispObj cur_allocptr = xpGPR(xp, allocptr);
1689  int allocptr_tag = fulltag_of(cur_allocptr);
1690 
1691  /* fprintf(dbgout, "0x%x @ 0x%x\n", (natural)tcr,(natural)program_counter); */
1692
1693
1694  if ((program_counter < &egc_write_barrier_end) && 
1695      (program_counter >= &egc_write_barrier_start)) {
1696    LispObj *ea = 0, val = 0, root = 0;
1697    bitvector refbits = (bitvector)(lisp_global(REFBITS));
1698    Boolean need_check_memo = true, need_memoize_root = false;
1699
1700    if (program_counter >= &egc_set_hash_key_conditional) {
1701      if ((program_counter < &egc_set_hash_key_conditional_test) ||
1702          ((program_counter == &egc_set_hash_key_conditional_test) &&
1703           (xpGPR(xp,imm0) != 0))) {
1704        return;
1705      }
1706      root = xpGPR(xp,arg_x);
1707      ea = (LispObj *) (root+unbox_fixnum(xpGPR(xp,temp2)));
1708      need_memoize_root = true;
1709    } else if (program_counter >= &egc_store_node_conditional) {
1710      if ((program_counter < &egc_store_node_conditional_test) ||
1711          ((program_counter == &egc_store_node_conditional_test) &&
1712           (xpGPR(xp,imm0) != 0))) {
1713        /* The conditional store either hasn't been attempted yet, or
1714           has failed.  No need to adjust the PC, or do memoization. */
1715        return;
1716      }
1717      ea = (LispObj*)(xpGPR(xp,arg_x) + unbox_fixnum(xpGPR(xp,temp2)));
1718      xpGPR(xp,arg_z) = t_value;
1719    } else if (program_counter >= &egc_set_hash_key) {
1720      if (program_counter < &egc_set_hash_key_did_store) {
1721        return;
1722      }
1723      root = xpGPR(xp,arg_x);
1724      val = xpGPR(xp,arg_z);
1725      ea = (LispObj *) (root+xpGPR(xp,arg_y)+misc_data_offset);
1726      need_memoize_root = true;
1727    } else if (program_counter >= &egc_gvset) {
1728      if (program_counter < &egc_gvset_did_store) {
1729        return;
1730      } 
1731      ea = (LispObj *) (xpGPR(xp,arg_x)+xpGPR(xp,arg_y)+misc_data_offset);
1732      val = xpGPR(xp,arg_z);
1733    } else if (program_counter >= &egc_rplacd) {
1734      if (program_counter < &egc_rplacd_did_store) {
1735        return;
1736      } 
1737      ea = (LispObj *) untag(xpGPR(xp,arg_y));
1738      val = xpGPR(xp,arg_z);
1739    } else {                      /* egc_rplaca */
1740      if (program_counter < &egc_rplaca_did_store) {
1741        return;
1742      } 
1743      ea =  ((LispObj *) untag(xpGPR(xp,arg_y)))+1;
1744      val = xpGPR(xp,arg_z);
1745    }
1746    if (need_check_memo) {
1747      natural  bitnumber = area_dnode(ea, lisp_global(REF_BASE));
1748      if ((bitnumber < lisp_global(OLDSPACE_DNODE_COUNT)) &&
1749          ((LispObj)ea < val)) {
1750        atomic_set_bit(refbits, bitnumber);
1751        atomic_set_bit(global_refidx, bitnumber>>8);
1752        if (need_memoize_root) {
1753          bitnumber = area_dnode(root, lisp_global(REF_BASE));
1754          atomic_set_bit(refbits, bitnumber);
1755          atomic_set_bit(global_refidx,bitnumber>>8);
1756        }
1757      }
1758    }
1759    tcr->flags |= (1<<TCR_FLAG_BIT_PC_LUSERED);
1760    dsb();
1761
1762    xpPC(xp) = xpLR(xp);
1763    return;
1764  }
1765
1766
1767 
1768  if (allocptr_tag != tag_fixnum) {
1769    alloc_instruction_id state = classify_alloc_instruction(xp);
1770    if (state != ID_unrecognized_alloc_instruction) {
1771
1772      if (state == ID_finish_allocation) {
1773        if (allocptr_tag == fulltag_cons) {
1774          finish_allocating_cons(xp);
1775         
1776        } else {
1777          if (allocptr_tag == fulltag_misc) {
1778            finish_allocating_uvector(xp);
1779          } else {
1780            Bug(xp, "what's being allocated here ?");
1781          }
1782        }
1783      } else {
1784        restart_allocation(xp);
1785      }
1786      xpGPR(xp,allocptr) = VOID_ALLOCPTR;
1787#if 0
1788      if (state == 1777 + ID_alloc_trap_instruction) {
1789        /* what a tangled web we weave */
1790        xpGPR(xp,allocptr)+=allocptr_displacement(xp);
1791      }
1792#endif
1793    } else {
1794      Bug(xp, "urecognized allocation atate");
1795    }
1796    tcr->flags |= (1<<TCR_FLAG_BIT_PC_LUSERED);
1797    dsb();
1798    return;
1799  }
1800  {
1801    lisp_frame *swap_frame = NULL;
1802    pc base = &swap_lr_lisp_frame_temp0;
1803   
1804    if ((program_counter >base)             /* sic */
1805        && (program_counter < (base+3))) {
1806      swap_frame = (lisp_frame *)xpGPR(xp,temp0);
1807    } else {
1808      base = &swap_lr_lisp_frame_arg_z;
1809      if ((program_counter > base) && (program_counter < (base+3))) { 
1810        swap_frame = (lisp_frame *)xpGPR(xp,arg_z);
1811      }
1812    }
1813    if (swap_frame) {
1814      if (program_counter == (base+1)) {
1815        swap_frame->savelr = xpGPR(xp,Rlr);
1816      }
1817      xpGPR(xp,Rlr) = xpGPR(xp,imm0);
1818      xpPC(xp) = base+3;
1819      tcr->flags |= (1<<TCR_FLAG_BIT_PC_LUSERED);
1820      dsb();
1821      return;
1822    }
1823  }
1824}
1825
/*
 * Handler for SIGNAL_FOR_PROCESS_INTERRUPT.  If interrupts are
 * disabled, or the thread isn't running lisp code, or it's unwinding,
 * just record the interrupt as pending; otherwise normalize the
 * context with pc_luser_xp() and deliver the interrupt through
 * handle_exception() under the exception lock.
 */
void
interrupt_handler (int signum, siginfo_t *info, ExceptionInformation *context)
{
  TCR *tcr = get_interrupt_tcr(false);
  if (tcr) {
    if (TCR_INTERRUPT_LEVEL(tcr) < 0) {
      /* Interrupts disabled: defer. */
      tcr->interrupt_pending = 1 << fixnumshift;
    } else {
      LispObj cmain = nrs_CMAIN.vcell;

      if ((fulltag_of(cmain) == fulltag_misc) &&
          (header_subtag(header_of(cmain)) == subtag_macptr)) {
        /*
           This thread can (allegedly) take an interrupt now.
           It's tricky to do that if we're executing
           foreign code (especially Linuxthreads code, much
           of which isn't reentrant.)
           If we're unwinding the stack, we also want to defer
           the interrupt.
        */
        if ((tcr->valence != TCR_STATE_LISP) ||
            (tcr->unwinding != 0)) {
          tcr->interrupt_pending = 1 << fixnumshift;
        } else {
          xframe_list xframe_link;
          int old_valence;
          signed_natural disp=0;
          natural old_last_lisp_frame = tcr->last_lisp_frame;
         
          tcr->last_lisp_frame = xpGPR(context,Rsp);
          /* Make any half-done atomic sequence look done/undone. */
          pc_luser_xp(context, tcr, &disp);
          old_valence = prepare_to_wait_for_exception_lock(tcr, context);
          wait_for_exception_lock_in_handler(tcr, context, &xframe_link);
          handle_exception(signum, context, tcr, info, old_valence);
          if (disp) {
            /* Undo the allocptr displacement pc_luser_xp reported. */
            xpGPR(context,allocptr) -= disp;
          }
          unlock_exception_lock_in_handler(tcr);
          exit_signal_handler(tcr, old_valence, old_last_lisp_frame);
        }
      }
    }
  }
#ifdef DARWIN
    DarwinSigReturn(context);
#endif
}
1873
1874#ifdef USE_SIGALTSTACK
/* Altstack trampoline: run interrupt_handler() on the main stack
   (handlers that can GC must not run on the small alternate stack). */
void
altstack_interrupt_handler(int signum, siginfo_t *info, ExceptionInformation *context)
{
  invoke_handler_on_main_stack(signum, info, context, __builtin_return_address(0),interrupt_handler);
}
1880#endif
1881
1882
/*
 * Install "handler" for signal "signo" via sigaction, with all
 * signals blocked during handler execution.  "flags" is a bitmask of
 * ON_ALTSTACK / RESTART_SYSCALLS / RESERVE_FOR_LISP.  Exits the
 * process if sigaction() fails.
 */
void
install_signal_handler(int signo, void *handler, unsigned flags)
{
  struct sigaction sa;
  int err;

  sigfillset(&sa.sa_mask);
#if 1
  if (signo == SIGILL) {
    sigdelset(&sa.sa_mask, thread_suspend_signal);
  }
#endif
#if 0
  if (signo == thread_suspend_signal) {
    sigdelset(&sa.sa_mask, SIGILL);
    sigdelset(&sa.sa_mask, SIGTRAP);   
  }
#endif
 
  sa.sa_sigaction = (void *)handler;
  /* NOTE(review): this second sigfillset re-blocks everything,
     undoing the SIGILL sigdelset above; one of the two is presumably
     unintended — confirm before changing. */
  sigfillset(&sa.sa_mask);
  sa.sa_flags = SA_SIGINFO;

#ifdef ANDROID
  sa.sa_flags |= SA_NODEFER;
#endif
#ifdef USE_SIGALTSTACK
  if (flags & ON_ALTSTACK)
    sa.sa_flags |= SA_ONSTACK;
#endif
  if (flags & RESTART_SYSCALLS)
    sa.sa_flags |= SA_RESTART;
  if (flags & RESERVE_FOR_LISP) {
    /* Remember that lisp owns this signal; user code shouldn't grab it. */
    extern sigset_t user_signals_reserved;
    sigaddset(&user_signals_reserved, signo);
  }

  err = sigaction(signo, &sa, NULL);
  if (err) {
    perror("sigaction");
    exit(1);
  }
}
1926
1927
/*
 * Install the kernel's synchronous exception handlers: SIGILL on the
 * normal stack, SIGSEGV/SIGBUS on the alternate stack (they may be
 * stack-overflow faults), the process-interrupt signal, and ignore
 * SIGPIPE (broken-pipe errors are reported via errno instead).
 */
void
install_pmcl_exception_handlers()
{
  install_signal_handler(SIGILL, (void *)signal_handler, RESERVE_FOR_LISP);
  install_signal_handler(SIGSEGV, (void *)ALTSTACK(signal_handler),
                         RESERVE_FOR_LISP|ON_ALTSTACK);
  install_signal_handler(SIGBUS, (void *)ALTSTACK(signal_handler),
                           RESERVE_FOR_LISP|ON_ALTSTACK);
  install_signal_handler(SIGNAL_FOR_PROCESS_INTERRUPT,
                         (void *)interrupt_handler, RESERVE_FOR_LISP);
  signal(SIGPIPE, SIG_IGN);
}
1940
1941
1942#ifdef USE_SIGALTSTACK
1943void
1944setup_sigaltstack(area *a)
1945{
1946  stack_t stack;
1947#if 0
1948  stack.ss_sp = a->low;
1949  a->low += SIGSTKSZ*8;
1950#endif
1951  stack.ss_size = SIGSTKSZ*8;
1952  stack.ss_flags = 0;
1953  stack.ss_sp = mmap(NULL,stack.ss_size, PROT_READ|PROT_WRITE|PROT_EXEC,MAP_ANON|MAP_PRIVATE,-1,0);
1954  if (sigaltstack(&stack, NULL) != 0) {
1955    perror("sigaltstack");
1956    exit(-1);
1957  }
1958}
1959#endif
1960
1961void
1962thread_kill_handler(int signum, siginfo_t info, ExceptionInformation *xp)
1963{
1964  TCR *tcr = get_tcr(false);
1965  area *a;
1966#ifndef ANDROID
1967  sigset_t mask;
1968 
1969  sigemptyset(&mask);
1970#else
1971  int mask[] = {0,0};
1972#endif
1973
1974  if (tcr) {
1975    tcr->valence = TCR_STATE_FOREIGN;
1976    a = tcr->vs_area;
1977    if (a) {
1978      a->active = a->high;
1979    }
1980    a = tcr->cs_area;
1981    if (a) {
1982      a->active = a->high;
1983    }
1984  }
1985 
1986  pthread_sigmask(SIG_SETMASK,(sigset_t *)&mask,NULL);
1987  pthread_exit(NULL);
1988}
1989
1990#ifdef USE_SIGALTSTACK
/* Altstack trampoline: run thread_kill_handler() on the main stack. */
void
altstack_thread_kill_handler(int signo, siginfo_t *info, ExceptionInformation *xp)
{
  invoke_handler_on_main_stack(signo, info, xp, __builtin_return_address(0), thread_kill_handler);
}
1996#endif
1997
/*
 * Choose and install the signals used for thread suspension and
 * thread termination.  Suspend restarts interrupted syscalls so that
 * a suspension is transparent to foreign code doing I/O.
 */
void
thread_signal_setup()
{
  thread_suspend_signal = SIG_SUSPEND_THREAD;
  thread_kill_signal = SIG_KILL_THREAD;

  install_signal_handler(thread_suspend_signal, (void *)suspend_resume_handler,
                         RESERVE_FOR_LISP|RESTART_SYSCALLS);
  install_signal_handler(thread_kill_signal, (void *)thread_kill_handler,
                         RESERVE_FOR_LISP);
}
2009
2010
2011
2012void
2013unprotect_all_areas()
2014{
2015  protected_area_ptr p;
2016
2017  for(p = AllProtectedAreas, AllProtectedAreas = NULL; p; p = p->next) {
2018    unprotect_area(p);
2019  }
2020}
2021
2022/*
2023  A binding subprim has just done "twlle limit_regno,idx_regno" and
2024  the trap's been taken.  Extend the tcr's tlb so that the index will
2025  be in bounds and the new limit will be on a page boundary, filling
2026  in the new page(s) with 'no_thread_local_binding_marker'.  Update
2027  the tcr fields and the registers in the xp and return true if this
2028  all works, false otherwise.
2029
2030  Note that the tlb was allocated via malloc, so realloc can do some
2031  of the hard work.
2032*/
/* See the block comment above: grow the thread-local-binding vector
   so that "index" (taken from the trapping register) is in bounds,
   rounding the new limit up to a 4KB boundary and filling new slots
   with no_thread_local_binding_marker.  Returns false (leaving the
   old tlb intact) if realloc fails.
   NOTE(review): despite the comment above, xp's registers are not
   updated here — presumably the caller re-executes the trapping
   instruction after the tlb fields change; confirm. */
Boolean
extend_tcr_tlb(TCR *tcr, 
               ExceptionInformation *xp, 
               unsigned idx_regno)
{
  unsigned
    index = (unsigned) (xpGPR(xp,idx_regno)),
    old_limit = tcr->tlb_limit,
    new_limit = align_to_power_of_2(index+1,12),
    new_bytes = new_limit-old_limit;
  LispObj
    *old_tlb = tcr->tlb_pointer,
    *new_tlb = realloc(old_tlb, new_limit),
    *work;

  /* On failure old_tlb is still valid and still installed in the tcr. */
  if (new_tlb == NULL) {
    return false;
  }
 
  /* Initialize only the newly-added portion. */
  work = (LispObj *) ((BytePtr)new_tlb+old_limit);

  while (new_bytes) {
    *work++ = no_thread_local_binding_marker;
    new_bytes -= sizeof(LispObj);
  }
  tcr->tlb_pointer = new_tlb;
  tcr->tlb_limit = new_limit;
  return true;
}
2062
2063
2064
/* One-time process-wide initialization of exception handling. */
void
exception_init()
{
  install_pmcl_exception_handlers();
}
2070
2071
2072
2073
2074
2075
2076
/*
 * Handler installed during startup CPU-feature probing.  The kernel
 * executes feature-check instructions at known addresses; if one of
 * them faults, the PC here identifies which feature is missing and an
 * older architecture version is assumed.  Control resumes at the
 * saved LR.
 * NOTE(review): feature_check_clrex is declared but never compared
 * here — confirm whether it should participate in the test.
 */
void
early_signal_handler(int signum, siginfo_t *info, ExceptionInformation  *context)
{
  extern pc feature_check_fpu,feature_check_ldrex,feature_check_clrex;
  extern int arm_architecture_version;
 
  if ((xpPC(context) == feature_check_fpu) ||
      (xpPC(context) == feature_check_ldrex)) {
    arm_architecture_version = 5;
  } else {
    arm_architecture_version = 6;
  }
  /* Skip the faulting probe: resume at the return address. */
  xpPC(context) = xpLR(context);
}
Note: See TracBrowser for help on using the repository browser.