source: release/1.9/source/lisp-kernel/arm-exceptions.c @ 15755

Last change on this file since 15755 was 15755, checked in by gb, 6 years ago

Recent changes from trunk.

File size: 52.1 KB
RevLine 
[14545]1
2
[14119]3/*
4   Copyright (C) 2010 Clozure Associates
5   This file is part of Clozure CL. 
6
7   Clozure CL is licensed under the terms of the Lisp Lesser GNU Public
8   License , known as the LLGPL and distributed with Clozure CL as the
9   file "LICENSE".  The LLGPL consists of a preamble and the LGPL,
10   which is distributed with Clozure CL as the file "LGPL".  Where these
11   conflict, the preamble takes precedence. 
12
13   Clozure CL is referenced in the preamble as the "LIBRARY."
14
15   The LLGPL is also available online at
16   http://opensource.franz.com/preamble.html
17*/
18
19#include "lisp.h"
20#include "lisp-exceptions.h"
21#include "lisp_globals.h"
22#include <ctype.h>
23#include <stdio.h>
24#include <stddef.h>
25#include <string.h>
26#include <stdarg.h>
27#include <errno.h>
28#include <stdio.h>
29#ifdef LINUX
30#include <strings.h>
31#include <sys/mman.h>
[14271]32#ifndef ANDROID
[14119]33#include <fpu_control.h>
34#include <linux/prctl.h>
35#endif
[14271]36#endif
[14119]37
38#ifdef DARWIN
39#include <sys/mman.h>
40#ifndef SA_NODEFER
41#define SA_NODEFER 0
42#endif
43#include <sysexits.h>
44
[14549]45
[14119]46/* a distinguished UUO at a distinguished address */
47extern void pseudo_sigreturn(ExceptionInformation *);
48#endif
49
50
[14197]51#include "threads.h"
[14119]52
[14549]53#ifdef ANDROID
54#define pthread_sigmask(how,in,out) rt_sigprocmask(how,in,out,8)
55#endif
[14119]56
57#ifdef LINUX
58
/* Intentionally a no-op on Linux: this build does not unmask FP traps
   via this hook. */
void
enable_fp_exceptions()
{
}
63
/* Intentionally a no-op on Linux; see enable_fp_exceptions. */
void
disable_fp_exceptions()
{
}
68#endif
69
70/*
71  Handle exceptions.
72
73*/
74
extern LispObj lisp_nil;

extern natural lisp_heap_gc_threshold;
extern Boolean grow_dynamic_area(natural);

/* When false, the allocation-trap handler signals a lisp error instead
   of allocating; toggled via GC_TRAP_FUNCTION_ALLOCATION_CONTROL in
   handle_gc_trap(). */
Boolean allocation_enabled = true;




/* Page geometry assumed throughout this file: 4KB pages. */
int
page_size = 4096;

int
log2_page_size = 12;
90
91
92
93
94
/*
  If the PC is pointing to an allocation trap, the previous instruction
  must have decremented allocptr.  Return the (non-zero, negative) amount
  by which allocptr was decremented, or 0 if the PC is not at an
  allocation trap.
*/
signed_natural
allocptr_displacement(ExceptionInformation *xp)
{
  pc program_counter = xpPC(xp);
  opcode instr = *program_counter, prev_instr;
  /* Offset (in instructions, back from the trap) of the decrementing
     instruction; -3 normally, -4 when a branch-around is present. */
  int delta = -3;

  if (IS_ALLOC_TRAP(instr)) {
    /* The alloc trap must have been preceded by a cmp and a
       load from tcr.allocbase. */
    if (IS_BRANCH_AROUND_ALLOC_TRAP(program_counter[-1])) {
      delta = -4;
    }
    prev_instr = program_counter[delta];

    /* Case 1: allocptr was decremented by a register operand. */
    if (IS_SUB_RM_FROM_ALLOCPTR(prev_instr)) {
      return -((signed_natural)xpGPR(xp,RM_field(prev_instr)));
    }
   
    /* Case 2: decremented by a small (8-bit) immediate. */
    if (IS_SUB_LO_FROM_ALLOCPTR(prev_instr)) {
      return -((signed_natural)(prev_instr & 0xff));
    }

    /* Case 3: a two-instruction immediate subtract: a rotated-immediate
       SUB combined with a preceding low-byte SUB. */
    if (IS_SUB_FROM_ALLOCPTR(prev_instr)) {
      natural disp = ror(prev_instr&0xff,(prev_instr&0xf00)>>7);

      instr = program_counter[delta-1];
      if (IS_SUB_LO_FROM_ALLOCPTR(instr)) {
        return -((signed_natural)(disp | (instr & 0xff)));
      }
    }
    Bug(xp, "Can't determine allocation displacement");
  }
  return 0;
}
135
136
137/*
138  A cons cell's been successfully allocated, but the allocptr's
139  still tagged (as fulltag_cons, of course.)  Emulate any instructions
140  that might follow the allocation (stores to the car or cdr, an
141  assignment to the "result" gpr) that take place while the allocptr's
142  tag is non-zero, advancing over each such instruction.  When we're
143  done, the cons cell will be allocated and initialized, the result
144  register will point to it, the allocptr will be untagged, and
145  the PC will point past the instruction that clears the allocptr's
146  tag.
147*/
148void
149finish_allocating_cons(ExceptionInformation *xp)
150{
151  pc program_counter = xpPC(xp);
152  opcode instr;
153  LispObj cur_allocptr = xpGPR(xp, allocptr);
154  cons *c = (cons *)ptr_from_lispobj(untag(cur_allocptr));
155  int target_reg;
156
157  while (1) {
158    instr = *program_counter++;
159
160    if (IS_CLR_ALLOCPTR_TAG(instr)) {
161      xpGPR(xp, allocptr) = untag(cur_allocptr);
162      xpPC(xp) = program_counter;
163      return;
164    } else if (IS_SET_ALLOCPTR_CAR_RD(instr)) {
165      c->car = xpGPR(xp,RD_field(instr));
166    } else if (IS_SET_ALLOCPTR_CDR_RD(instr)) {
167      c->cdr = xpGPR(xp,RD_field(instr));
168    } else {
169      /* assert(IS_SET_ALLOCPTR_RESULT_RD(instr)) */
170      xpGPR(xp,RD_field(instr)) = cur_allocptr;
171    }
172  }
173}
174
175/*
176  We were interrupted in the process of allocating a uvector; we
177  survived the allocation trap, and allocptr is tagged as fulltag_misc.
178  Emulate any instructions which store a header into the uvector,
179  assign the value of allocptr to some other register, and clear
180  allocptr's tag.  Don't expect/allow any other instructions in
181  this environment.
182*/
183void
184finish_allocating_uvector(ExceptionInformation *xp)
185{
186  pc program_counter = xpPC(xp);
187  opcode instr;
188  LispObj cur_allocptr = xpGPR(xp, allocptr);
189  int target_reg;
190
191  while (1) {
192    instr = *program_counter++;
193    if (IS_CLR_ALLOCPTR_TAG(instr)) {
194      xpGPR(xp, allocptr) = untag(cur_allocptr);
195      xpPC(xp) = program_counter;
196      return;
197    }
198    if (IS_SET_ALLOCPTR_HEADER_RD(instr)) {
[14366]199      header_of(cur_allocptr) = xpGPR(xp,RD_field(instr));
[14119]200    } else if (IS_SET_ALLOCPTR_RESULT_RD(instr)) {
201      xpGPR(xp,RD_field(instr)) = cur_allocptr;
202    } else {
203      Bug(xp, "Unexpected instruction following alloc trap at " LISP ":",program_counter);
204    }
205  }
206}
207
208
/*
  Try to make room for an object of bytes_needed bytes, adjusting
  allocptr by disp_from_allocptr on success.  May run the EGC, a full
  GC, and/or grow the heap.  Returns true on success, false if the
  request could not be satisfied.
*/
Boolean
allocate_object(ExceptionInformation *xp,
                natural bytes_needed, 
                signed_natural disp_from_allocptr,
                TCR *tcr)
{
  area *a = active_dynamic_area;

  /* Maybe do an EGC */
  if (a->older && lisp_global(OLDEST_EPHEMERAL)) {
    if (((a->active)-(a->low)) >= a->threshold) {
      gc_from_xp(xp, 0L);
    }
  }

  /* Life is pretty simple if we can simply grab a segment
     without extending the heap.
  */
  if (new_heap_segment(xp, bytes_needed, false, tcr)) {
    xpGPR(xp, allocptr) += disp_from_allocptr;
    return true;
  }
 
  /* It doesn't make sense to try a full GC if the object
     we're trying to allocate is larger than everything
     allocated so far.
  */
  if ((lisp_global(HEAP_END)-lisp_global(HEAP_START)) > bytes_needed) {
    untenure_from_area(tenured_area); /* force a full GC */
    gc_from_xp(xp, 0L);
  }
 
  /* Try again, growing the heap if necessary */
  if (new_heap_segment(xp, bytes_needed, true, tcr)) {
    xpGPR(xp, allocptr) += disp_from_allocptr;
    return true;
  }
 
  return false;
}
249
250#ifndef XNOMEM
251#define XNOMEM 10
252#endif
253
254void
255update_bytes_allocated(TCR* tcr, void *cur_allocptr)
256{
257  BytePtr
258    last = (BytePtr) tcr->last_allocptr, 
259    current = (BytePtr) cur_allocptr;
260  if (last && (cur_allocptr != ((void *)VOID_ALLOCPTR))) {
261    tcr->bytes_allocated += last-current;
262  }
263  tcr->last_allocptr = 0;
264}
265
266void
267lisp_allocation_failure(ExceptionInformation *xp, TCR *tcr, natural bytes_needed)
268{
269  /* Couldn't allocate the object.  If it's smaller than some arbitrary
270     size (say 128K bytes), signal a "chronically out-of-memory" condition;
271     else signal a "allocation request failed" condition.
272  */
273  xpGPR(xp,allocptr) = xpGPR(xp,allocbase) = VOID_ALLOCPTR;
274  handle_error(xp, bytes_needed < (128<<10) ? XNOMEM : error_alloc_failed,0, NULL);
275}
276
277/*
278  Allocate a large list, where "large" means "large enough to
279  possibly trigger the EGC several times if this was done
280  by individually allocating each CONS."  The number of
281   conses in question is in arg_z; on successful return,
282  the list will be in arg_z
283*/
284
/*
  Allocate nconses (from arg_z, a fixnum) cons cells in one contiguous
  block and chain them into a list whose every element is the value in
  arg_y.  On success arg_z holds the list head.  On failure a lisp
  error is signalled via lisp_allocation_failure().  Always returns
  true (the trap is considered handled either way).
*/
Boolean
allocate_list(ExceptionInformation *xp, TCR *tcr)
{
  natural
    nconses = (unbox_fixnum(xpGPR(xp,arg_z))),
    bytes_needed = (nconses << dnode_shift);
  LispObj
    prev = lisp_nil,
    current,
    initial = xpGPR(xp,arg_y);

  if (nconses == 0) {
    /* Silly case */
    xpGPR(xp,arg_z) = lisp_nil;
    xpGPR(xp,allocptr) = lisp_nil;
    return true;
  }
  update_bytes_allocated(tcr, (void *)(void *) tcr->save_allocptr);
  if (allocate_object(xp,bytes_needed,(-bytes_needed)+fulltag_cons,tcr)) {
    /* Walk the fresh block from low to high, linking each cons's cdr
       (deref slot 0) to the previous cell and storing the fill value
       in slot 1; "prev" ends up as the head of the chain. */
    for (current = xpGPR(xp,allocptr);
         nconses;
         prev = current, current+= dnode_size, nconses--) {
      deref(current,0) = prev;
      deref(current,1) = initial;
    }
    xpGPR(xp,arg_z) = prev;
    xpGPR(xp,arg_y) = xpGPR(xp,allocptr);
    /* Strip the cons tag from allocptr now that allocation is done. */
    xpGPR(xp,allocptr)-=fulltag_cons;
  } else {
    lisp_allocation_failure(xp,tcr,bytes_needed);
  }
  return true;
}
318
319Boolean
320handle_alloc_trap(ExceptionInformation *xp, TCR *tcr)
321{
322  pc program_counter;
323  natural cur_allocptr, bytes_needed = 0;
324  opcode prev_instr;
325  signed_natural disp = 0;
326  unsigned allocptr_tag;
327
[14807]328  if (!allocation_enabled) {
329    /* Back up before the alloc_trap, then let pc_luser_xp() back
330       up some more. */
331    xpPC(xp)-=1;
332    pc_luser_xp(xp,tcr, NULL);
333    allocation_enabled = true;
334    tcr->save_allocbase = (void *)VOID_ALLOCPTR;
335    handle_error(xp, error_allocation_disabled,0,NULL);
336    return true;
337  }
338
[14119]339  cur_allocptr = xpGPR(xp,allocptr);
340
341  allocptr_tag = fulltag_of(cur_allocptr);
342
343  switch (allocptr_tag) {
344  case fulltag_cons:
345    bytes_needed = sizeof(cons);
346    disp = -sizeof(cons) + fulltag_cons;
347    break;
348
349  case fulltag_misc:
350    disp = allocptr_displacement(xp);
351    bytes_needed = (-disp) + fulltag_misc;
352    break;
353
354    /* else fall thru */
355  default:
356    return false;
357  }
358
359  update_bytes_allocated(tcr,((BytePtr)(cur_allocptr-disp)));
360  if (allocate_object(xp, bytes_needed, disp, tcr)) {
361    adjust_exception_pc(xp,4);
362    return true;
363  }
364  lisp_allocation_failure(xp,tcr,bytes_needed);
365  return true;
366}
367
/* gc_deferred counts GC requests deferred because GC was inhibited
   (see gc_like_from_xp); full_gc_deferred records that a full GC was
   deferred (see handle_gc_trap). */
natural gc_deferred = 0, full_gc_deferred = 0;
369
/* Per-thread callback used by GC_TRAP_FUNCTION_FLASH_FREEZE (via
   gc_like_from_xp); nothing to do per-thread here. */
signed_natural
flash_freeze(TCR *tcr, signed_natural param)
{
  return 0;
}
375
/*
  Handle a GC trap.  The sub-operation selector is in imm0 and an
  argument (when applicable) in imm1; results are returned in imm0 or
  arg_z depending on the operation.  Advances the PC past the trap and
  returns true.
*/
Boolean
handle_gc_trap(ExceptionInformation *xp, TCR *tcr)
{
  LispObj
    selector = xpGPR(xp,imm0), 
    arg = xpGPR(xp,imm1);
  area *a = active_dynamic_area;
  Boolean egc_was_enabled = (a->older != NULL);
  natural gc_previously_deferred = gc_deferred;


  switch (selector) {
  case GC_TRAP_FUNCTION_EGC_CONTROL:
    /* Enable/disable the EGC; returns the previous state in arg_z. */
    egc_control(arg != 0, a->active);
    xpGPR(xp,arg_z) = lisp_nil + (egc_was_enabled ? t_offset : 0);
    break;

  case GC_TRAP_FUNCTION_CONFIGURE_EGC:
    /* Set the three generation thresholds from arg_x/arg_y/arg_z. */
    a->threshold = unbox_fixnum(xpGPR(xp, arg_x));
    g1_area->threshold = unbox_fixnum(xpGPR(xp, arg_y));
    g2_area->threshold = unbox_fixnum(xpGPR(xp, arg_z));
    xpGPR(xp,arg_z) = lisp_nil+t_offset;
    break;

  case GC_TRAP_FUNCTION_SET_LISP_HEAP_THRESHOLD:
    if (((signed_natural) arg) > 0) {
      /* Round the requested threshold up to a whole heap segment. */
      lisp_heap_gc_threshold = 
        align_to_power_of_2((arg-1) +
                            (heap_segment_size - 1),
                            log2_heap_segment_size);
    }
    /* fall through */
  case GC_TRAP_FUNCTION_GET_LISP_HEAP_THRESHOLD:
    xpGPR(xp, imm0) = lisp_heap_gc_threshold;
    break;

  case GC_TRAP_FUNCTION_USE_LISP_HEAP_THRESHOLD:
    /*  Try to put the current threshold in effect.  This may
        need to disable/reenable the EGC. */
    untenure_from_area(tenured_area);
    resize_dynamic_heap(a->active,lisp_heap_gc_threshold);
    if (egc_was_enabled) {
      if ((a->high - a->active) >= a->threshold) {
        tenure_to_area(tenured_area);
      }
    }
    xpGPR(xp, imm0) = lisp_heap_gc_threshold;
    break;

  case GC_TRAP_FUNCTION_ENSURE_STATIC_CONSES:
    ensure_static_conses(xp,tcr,32768);
    break;

  case GC_TRAP_FUNCTION_FLASH_FREEZE:
    /* GC-like operation that makes everything currently allocated
       "static" (recorded in tenured_area->static_dnodes). */
    untenure_from_area(tenured_area);
    gc_like_from_xp(xp,flash_freeze,0);
    a->active = (BytePtr) align_to_power_of_2(a->active, log2_page_size);
    tenured_area->static_dnodes = area_dnode(a->active, a->low);
    if (egc_was_enabled) {
      tenure_to_area(tenured_area);
    }
    xpGPR(xp, imm0) = tenured_area->static_dnodes << dnode_shift;
    break;

  case GC_TRAP_FUNCTION_ALLOCATION_CONTROL:
    /* arg: 0 = disable allocation, 1 = enable, other = query.
       arg_z gets the previous/current enabled state. */
    switch(arg) {
    case 0: /* disable if allocation enabled */
      xpGPR(xp, arg_z) = lisp_nil;
      if (allocation_enabled) {
        TCR *other_tcr;
        ExceptionInformation *other_context;
        /* Stop the world and normalize every thread's allocation
           state before turning allocation off. */
        suspend_other_threads(true);
        normalize_tcr(xp,tcr,false);
        for (other_tcr=tcr->next; other_tcr != tcr; other_tcr = other_tcr->next) {
          other_context = other_tcr->pending_exception_context;
          if (other_context == NULL) {
            other_context = other_tcr->suspend_context;
          }
          normalize_tcr(other_context, other_tcr, true);
        }
        allocation_enabled = false;
        xpGPR(xp, arg_z) = t_value;
        resume_other_threads(true);
      }
      break;

    case 1:                     /* enable if disabled */
      xpGPR(xp, arg_z) = lisp_nil;
      if (!allocation_enabled) {
        allocation_enabled = true;
        xpGPR(xp, arg_z) = t_value;
      }
      break;

    default:
      xpGPR(xp, arg_z) = lisp_nil;
      if (allocation_enabled) {
        xpGPR(xp, arg_z) = t_value;
      }
      break;
    }
    break;

       
  default:
    /* The remaining selectors all run some form of GC (possibly
       followed by purify/impurify/save-application). */
    update_bytes_allocated(tcr, (void *) ptr_from_lispobj(xpGPR(xp, allocptr)));

    if (selector == GC_TRAP_FUNCTION_IMMEDIATE_GC) {
      if (!full_gc_deferred) {
        gc_from_xp(xp, 0L);
        break;
      }
      /* Tried to do a full GC when gc was disabled.  That failed,
         so try full GC now */
      selector = GC_TRAP_FUNCTION_GC;
    }
   
    if (egc_was_enabled) {
      egc_control(false, (BytePtr) a->active);
    }
    gc_from_xp(xp, 0L);
    /* If the GC was deferred while we were trying, remember that a
       full GC is still owed. */
    if (gc_deferred > gc_previously_deferred) {
      full_gc_deferred = 1;
    } else {
      full_gc_deferred = 0;
    }
    if (selector > GC_TRAP_FUNCTION_GC) {
      if (selector & GC_TRAP_FUNCTION_IMPURIFY) {
        impurify_from_xp(xp, 0L);
        /*        nrs_GC_EVENT_STATUS_BITS.vcell |= gc_integrity_check_bit; */
        lisp_global(OLDSPACE_DNODE_COUNT) = 0;
        gc_from_xp(xp, 0L);
      }
      if (selector & GC_TRAP_FUNCTION_PURIFY) {
        purify_from_xp(xp, 0L);
        lisp_global(OLDSPACE_DNODE_COUNT) = 0;
        gc_from_xp(xp, 0L);
      }
      if (selector & GC_TRAP_FUNCTION_SAVE_APPLICATION) {
        OSErr err;
        extern OSErr save_application(unsigned, Boolean);
        TCR *tcr = get_tcr(true);
        area *vsarea = tcr->vs_area;
       
        /* Record the toplevel function (top of the value stack)
           before saving the image; exit on success. */
        nrs_TOPLFUNC.vcell = *((LispObj *)(vsarea->high)-1);
        err = save_application(arg, egc_was_enabled);
        if (err == noErr) {
          _exit(0);
        }
        fatal_oserr(": save_application", err);
      }
      switch (selector) {


      case GC_TRAP_FUNCTION_FREEZE:
        a->active = (BytePtr) align_to_power_of_2(a->active, log2_page_size);
        tenured_area->static_dnodes = area_dnode(a->active, a->low);
        xpGPR(xp, imm0) = tenured_area->static_dnodes << dnode_shift;
        break;
      default:
        break;
      }
    }
   
    if (egc_was_enabled) {
      egc_control(true, NULL);
    }
    break;
   
  }

  adjust_exception_pc(xp,4);
  return true;
}
550
551
552
/* Signal an error_stack_overflow lisp condition for the stack named by
   "reg" (Rsp or vsp).  Called after the guard page has been
   unprotected so the handler itself has room to run. */
void
signal_stack_soft_overflow(ExceptionInformation *xp, unsigned reg)
{
  /* The cstack just overflowed.  Force the current thread's
     control stack to do so until all stacks are well under their overflow
     limits.
  */

#if 0
  lisp_global(CS_OVERFLOW_LIMIT) = CS_OVERFLOW_FORCE_LIMIT; /* force unsigned traps to fail */
#endif
  handle_error(xp, error_stack_overflow, reg, NULL);
}
566
567/*
568  Lower (move toward 0) the "end" of the soft protected area associated
569  with a by a page, if we can.
570*/
571
/*
  Lower (move toward 0) the "end" of the soft protected area associated
  with a by a page, if we can (i.e. if at least 16KB would remain
  between the protected region's start and the new limit), then
  re-protect the area.
*/
void
adjust_soft_protection_limit(area *a)
{
  char *proposed_new_soft_limit = a->softlimit - 4096;
  protected_area_ptr p = a->softprot;
 
  if (proposed_new_soft_limit >= (p->start+16384)) {
    p->end = proposed_new_soft_limit;
    p->protsize = p->end-p->start;
    a->softlimit = proposed_new_soft_limit;
  }
  protect_area(p);
}
585
/* Restore/lower the soft limit of the stack named by stkreg (Rsp for
   the control stack, vsp for the value stack) after a soft-overflow
   was handled, re-establishing the guard. */
void
restore_soft_stack_limit(unsigned stkreg)
{
  area *a;
  TCR *tcr = get_tcr(true);

  switch (stkreg) {
  case Rsp:
    a = tcr->cs_area;
    /* Only lower the limit if 16KB of headroom above the hard limit
       would remain. */
    if ((a->softlimit - 4096) > (a->hardlimit + 16384)) {
      a->softlimit -= 4096;
    }
    tcr->cs_limit = (LispObj)ptr_to_lispobj(a->softlimit);
    break;
  case vsp:
    a = tcr->vs_area;
    adjust_soft_protection_limit(a);
    break;
  }
}
606
607/* Maybe this'll work someday.  We may have to do something to
608   make the thread look like it's not handling an exception */
/* Maybe this'll work someday.  We may have to do something to
   make the thread look like it's not handling an exception.
   Currently a no-op: the entire body is compiled out with #if 0. */
void
reset_lisp_process(ExceptionInformation *xp)
{
#if 0
  TCR *tcr = get_tcr(true);
  catch_frame *last_catch = (catch_frame *) ptr_from_lispobj(untag(tcr->catch_top));

  tcr->save_allocptr = (void *) ptr_from_lispobj(xpGPR(xp, allocptr));

  tcr->save_vsp = (LispObj *) ptr_from_lispobj(((lisp_frame *)ptr_from_lispobj(last_catch->csp))->savevsp);

  start_lisp(tcr, 1);
#endif
}
623
624/*
625  This doesn't GC; it returns true if it made enough room, false
626  otherwise.
627  If "extend" is true, it can try to extend the dynamic area to
628  satisfy the request.
629*/
630
/*
  This doesn't GC; it returns true if it made enough room, false
  otherwise.
  If "extend" is true, it can try to extend the dynamic area to
  satisfy the request.
  On success, allocptr and the TCR's allocation records are pointed at
  the new segment's high end.
*/

Boolean
new_heap_segment(ExceptionInformation *xp, natural need, Boolean extend, TCR *tcr)
{
  area *a;
  natural newlimit, oldlimit;
  natural log2_allocation_quantum = tcr->log2_allocation_quantum;

  a  = active_dynamic_area;
  oldlimit = (natural) a->active;
  /* Round both the current limit and the request up to the thread's
     allocation quantum. */
  newlimit = (align_to_power_of_2(oldlimit, log2_allocation_quantum) +
              align_to_power_of_2(need, log2_allocation_quantum));
  if (newlimit > (natural) (a->high)) {
    if (extend) {
      signed_natural inhibit = (signed_natural)(lisp_global(GC_INHIBIT_COUNT));
      natural extend_by = inhibit ? 0 : lisp_heap_gc_threshold;
      /* Try progressively smaller extensions, halving each time;
         give up below 4MB. */
      do {
        if (resize_dynamic_heap(a->active, (newlimit-oldlimit)+extend_by)) {
          break;
        }
        extend_by = align_to_power_of_2(extend_by>>1, log2_allocation_quantum);
        if (extend_by < 4<<20) {
          return false;
        }
      } while (1);
    } else {
      return false;
    }
  }
  a->active = (BytePtr) newlimit;
  tcr->last_allocptr = (void *)newlimit;
  xpGPR(xp,allocptr) = (LispObj) newlimit;
  tcr->save_allocbase = (void*) oldlimit;

  return true;
}
666
667 
/* Find, in the chain of progressively older areas starting at *aptr,
   the area containing "value"; record value as that area's active
   pointer, make *aptr point at it, and mark all younger areas as
   completely inactive (active == high).  Bug()s out if no area in the
   chain contains value. */
void
update_area_active (area **aptr, BytePtr value)
{
  area *a = *aptr;
  for (; a; a = a->older) {
    if ((a->low <= value) && (a->high >= value)) break;
  };
  if (a == NULL) Bug(NULL, "Can't find active area");
  a->active = value;
  *aptr = a;

  for (a = a->younger; a; a = a->younger) {
    a->active = a->high;
  }
}
683
684LispObj *
685tcr_frame_ptr(TCR *tcr)
686{
687  ExceptionInformation *xp;
688  LispObj *bp = NULL;
689
690  if (tcr->pending_exception_context)
691    xp = tcr->pending_exception_context;
692  else {
693    xp = tcr->suspend_context;
694  }
695  if (xp) {
696    bp = (LispObj *) xpGPR(xp, Rsp);
697  }
698  return bp;
699}
700
/*
  Bring a thread's allocation and stack-area bookkeeping into a
  consistent state before a GC-like operation.  "xp" is the thread's
  saved context (NULL means the thread is in a foreign-function call,
  in which case the TCR's saved values are used instead).
  "is_other_tcr" is true when normalizing a thread other than the one
  driving the operation; such threads may need pc_luser_xp() to back
  them out of an in-progress allocation first.
*/
void
normalize_tcr(ExceptionInformation *xp, TCR *tcr, Boolean is_other_tcr)

{
  void *cur_allocptr = NULL;
  LispObj freeptr = 0;

  if (xp) {
    if (is_other_tcr) {
      pc_luser_xp(xp, tcr, NULL);
      freeptr = xpGPR(xp, allocptr);
      /* Only account for allocptr if it's untagged (no allocation
         in progress). */
      if (fulltag_of(freeptr) == 0){
        cur_allocptr = (void *) ptr_from_lispobj(freeptr);
      }
    }
    update_area_active((area **)&tcr->cs_area, (BytePtr) ptr_from_lispobj(xpGPR(xp, Rsp)));
    update_area_active((area **)&tcr->vs_area, (BytePtr) ptr_from_lispobj(xpGPR(xp, vsp)));
  } else {
    /* In ff-call. */
    cur_allocptr = (void *) (tcr->save_allocptr);
    update_area_active((area **)&tcr->vs_area, (BytePtr) tcr->save_vsp);
    update_area_active((area **)&tcr->cs_area, (BytePtr) tcr->last_lisp_frame);
  }


  tcr->save_allocptr = tcr->save_allocbase = (void *)VOID_ALLOCPTR;
  if (cur_allocptr) {
    update_bytes_allocated(tcr, cur_allocptr);
    if (freeptr) {
      xpGPR(xp, allocptr) = VOID_ALLOCPTR;
    }
  }
}
734
/* TCR of the thread currently running a GC-like operation (set and
   cleared by gc_like_from_xp); NULL otherwise. */
TCR *gc_tcr = NULL;
736
737/* Suspend and "normalize" other tcrs, then call a gc-like function
738   in that context.  Resume the other tcrs, then return what the
739   function returned */
740
/* Suspend and "normalize" other tcrs, then call a gc-like function
   in that context.  Resume the other tcrs, then return what the
   function returned.  If GC is inhibited (GC_INHIBIT_COUNT non-zero),
   the operation is deferred: gc_deferred is bumped and 0 is
   returned. */

signed_natural
gc_like_from_xp(ExceptionInformation *xp, 
                signed_natural(*fun)(TCR *, signed_natural), 
                signed_natural param)
{
  TCR *tcr = get_tcr(true), *other_tcr;
  int result;
  signed_natural inhibit;

  suspend_other_threads(true);
  inhibit = (signed_natural)(lisp_global(GC_INHIBIT_COUNT));
  if (inhibit != 0) {
    /* Negate a positive inhibit count to record that a GC was
       requested while inhibited. */
    if (inhibit > 0) {
      lisp_global(GC_INHIBIT_COUNT) = (LispObj)(-inhibit);
    }
    resume_other_threads(true);
    gc_deferred++;
    return 0;
  }
  gc_deferred = 0;

  gc_tcr = tcr;

  xpGPR(xp, allocptr) = VOID_ALLOCPTR;

  normalize_tcr(xp, tcr, false);


  /* Give every other thread a gc_context describing where it
     stopped, then normalize it. */
  for (other_tcr = tcr->next; other_tcr != tcr; other_tcr = other_tcr->next) {
    if (other_tcr->pending_exception_context) {
      other_tcr->gc_context = other_tcr->pending_exception_context;
    } else if (other_tcr->valence == TCR_STATE_LISP) {
      other_tcr->gc_context = other_tcr->suspend_context;
    } else {
      /* no pending exception, didn't suspend in lisp state:
         must have executed a synchronous ff-call.
      */
      other_tcr->gc_context = NULL;
    }
    normalize_tcr(other_tcr->gc_context, other_tcr, true);
  }
   


  result = fun(tcr, param);

  /* Clear every thread's gc_context before resuming. */
  other_tcr = tcr;
  do {
    other_tcr->gc_context = NULL;
    other_tcr = other_tcr->next;
  } while (other_tcr != tcr);

  gc_tcr = NULL;

  resume_other_threads(true);

  return result;

}
800
801
802
803/* Returns #bytes freed by invoking GC */
804
/* Returns #bytes freed by invoking GC: the change in free space within
   the dynamic area plus any change in the area's high end. */

signed_natural
gc_from_tcr(TCR *tcr, signed_natural param)
{
  area *a;
  BytePtr oldfree, newfree;
  BytePtr oldend, newend;

  a = active_dynamic_area;
  oldend = a->high;
  oldfree = a->active;
  gc(tcr, param);
  newfree = a->active;
  newend = a->high;
#if 0
  fprintf(dbgout, "End GC  in 0x%lx\n", tcr);
#endif
  return ((oldfree-newfree)+(newend-oldend));
}
823
/* Run a GC via gc_like_from_xp, then release any pointers the GC
   queued for freeing.  Returns the number of bytes freed (or 0 if the
   GC was deferred). */
signed_natural
gc_from_xp(ExceptionInformation *xp, signed_natural param)
{
  signed_natural status = gc_like_from_xp(xp, gc_from_tcr, param);

  freeGCptrs();
  return status;
}
832
/* Run purify with the world stopped; see gc_like_from_xp. */
signed_natural
purify_from_xp(ExceptionInformation *xp, signed_natural param)
{
  return gc_like_from_xp(xp, purify, param);
}
838
/* Run impurify with the world stopped; see gc_like_from_xp. */
signed_natural
impurify_from_xp(ExceptionInformation *xp, signed_natural param)
{
  return gc_like_from_xp(xp, impurify, param);
}
844
845
846
847
848
849
/* Dispatch table for write faults on protected areas, indexed by the
   protected area's "why" field (see handle_protection_violation). */
protection_handler
 * protection_handlers[] = {
   do_spurious_wp_fault,
   do_soft_stack_overflow,
   do_soft_stack_overflow,
   do_soft_stack_overflow,
   do_hard_stack_overflow,   
   do_hard_stack_overflow,
   do_hard_stack_overflow
   };
860
861
/* True if the fault was caused by a write, determined from bit 11 of
   the saved fault status register.  NOTE(review): bit meaning is
   ARM-specific (fault status "write" bit) — confirm against the
   architecture manual for the targets supported. */
Boolean
is_write_fault(ExceptionInformation *xp, siginfo_t *info)
{
  return ((xpFaultStatus(xp) & 0x800) != 0);
}
867
/*
  Handle SIGSEGV/SIGBUS.  In order, this recognizes: faults at the
  thread's "safe reference" address, faults inside the touch_page probe,
  write faults on registered protected areas (dispatched through
  protection_handlers), writes into the read-only area (which it simply
  unprotects), and finally — if the thread was running lisp — a
  callback into lisp via nrs_CMAIN.  Returns true if handled.
*/
Boolean
handle_protection_violation(ExceptionInformation *xp, siginfo_t *info, TCR *tcr, int old_valence)
{
  BytePtr addr;
  protected_area_ptr area;
  protection_handler *handler;
  extern Boolean touch_page(void *);
  extern void touch_page_end(void);

#ifdef LINUX
  addr = (BytePtr) ((natural) (xpFaultAddress(xp)));
#else
  if (info) {
    addr = (BytePtr)(info->si_addr);
  } else {
    addr = (BytePtr) ((natural) (xpFaultAddress(xp)));
  }
#endif

  if (addr && (addr == tcr->safe_ref_address)) {
    /* A "safe reference" faulted: skip the faulting instruction and
       return 0 in imm0. */
    adjust_exception_pc(xp,4);

    xpGPR(xp,imm0) = 0;
    return true;
  }

  if (xpPC(xp) == (pc)touch_page) {
    /* The touch_page probe faulted: report failure (imm0 = 0) and
       resume at its end. */
    xpGPR(xp,imm0) = 0;
    xpPC(xp) = (pc)touch_page_end;
    return true;
  }


  if (is_write_fault(xp,info)) {
    area = find_protected_area(addr);
    if (area != NULL) {
      handler = protection_handlers[area->why];
      return handler(xp, area, addr);
    } else {
      if ((addr >= readonly_area->low) &&
          (addr < readonly_area->active)) {
        /* Writing into the readonly section: unprotect that page. */
        UnProtectMemory((LogicalAddress)(truncate_to_power_of_2(addr,log2_page_size)),
                        page_size);
        return true;
      }
    }
  }
  if (old_valence == TCR_STATE_LISP) {
    LispObj cmain = nrs_CMAIN.vcell;
   
    if ((fulltag_of(cmain) == fulltag_misc) &&
      (header_subtag(header_of(cmain)) == subtag_macptr)) {
     
      callback_for_trap(nrs_CMAIN.vcell, xp, is_write_fault(xp,info)?SIGBUS:SIGSEGV, (natural)addr, NULL);
    }
  }
  return false;
}
926
927
928
929
930
/* Hard stack overflow: attempt to reset the lisp process (currently a
   no-op; see reset_lisp_process) and report failure. */
OSStatus
do_hard_stack_overflow(ExceptionInformation *xp, protected_area_ptr area, BytePtr addr)
{
#ifdef SUPPORT_PRAGMA_UNUSED
#pragma unused(area,addr)
#endif
  reset_lisp_process(xp);
  return -1;
}
940
941extern area*
942allocate_vstack(natural useable);       /* This is in "pmcl-kernel.c" */
943
944extern area*
945allocate_tstack(natural useable);       /* This is in "pmcl-kernel.c" */
946
947
948
949
950
951
/* True if the stack location looks like a lisp frame (its marker slot
   holds lisp_frame_marker). */
Boolean
lisp_frame_p(lisp_frame *spPtr)
{
  return (spPtr->marker == lisp_frame_marker);
}
957
958
/* Diagnostic counter; not updated in the visible portion of this file. */
int ffcall_overflow_count = 0;
960
961
962
963
964
965
966/* Note: CURRENT_VS (CURRENT_TS) is always either the area containing
967  the current value of VSP (TSP) or an older area.  */
968
/* Value-stack soft overflow: unprotect the soft guard region so the
   handler has room to run, then signal the lisp condition. */
OSStatus
do_vsp_overflow (ExceptionInformation *xp, BytePtr addr)
{
  TCR* tcr = get_tcr(true);
  area *a = tcr->vs_area;
  protected_area_ptr vsp_soft = a->softprot;
  unprotect_area(vsp_soft);
  signal_stack_soft_overflow(xp,vsp);
  return 0;
}
979
980
981
/* Soft guard-page fault on the value stack or control stack: route
   value-stack faults to do_vsp_overflow, otherwise unprotect the
   guard and signal a control-stack overflow. */
OSStatus
do_soft_stack_overflow(ExceptionInformation *xp, protected_area_ptr prot_area, BytePtr addr)
{
  /* Trying to write into a guard page on the vstack or tstack.
     Allocate a new stack segment, emulate stwu and stwux for the TSP, and
     signal an error_stack_overflow condition.
      */
  if (prot_area->why == kVSPsoftguard) {
    return do_vsp_overflow(xp,addr);
  }
  unprotect_area(prot_area);
  signal_stack_soft_overflow(xp,Rsp);
  return 0;
}
996
/* Spurious write-protect fault: nothing to do; report failure so the
   caller falls through to other handling. */
OSStatus
do_spurious_wp_fault(ExceptionInformation *xp, protected_area_ptr area, BytePtr addr)
{
#ifdef SUPPORT_PRAGMA_UNUSED
#pragma unused(xp,area,addr)
#endif
  return -1;
}
1005
1006
1007
1008
1009     
1010
1011
1012
1013
1014
/* SIGFPE handler: not implemented on this platform; never handles. */
Boolean
handle_sigfpe(ExceptionInformation *xp, TCR *tcr)
{
  return false;
}
1020
1021
/* Hook for emulating unimplemented instructions: not implemented on
   this platform; never handles. */
Boolean
handle_unimplemented_instruction(ExceptionInformation *xp,
                                 opcode instruction,
                                 TCR *tcr)
{

  return false;
}
1030
1031Boolean
1032handle_exception(int xnum, 
1033                 ExceptionInformation *xp, 
1034                 TCR *tcr, 
1035                 siginfo_t *info,
1036                 int old_valence)
1037{
1038  pc program_counter;
1039  opcode instruction = 0;
1040
1041  if (old_valence != TCR_STATE_LISP) {
1042    return false;
1043  }
1044
1045  program_counter = xpPC(xp);
1046 
1047  if ((xnum == SIGILL) | (xnum == SIGTRAP)) {
1048    instruction = *program_counter;
1049  }
1050
1051  if (IS_ALLOC_TRAP(instruction)) {
1052    return handle_alloc_trap(xp, tcr);
1053  } else if ((xnum == SIGSEGV) ||
1054             (xnum == SIGBUS)) {
1055    return handle_protection_violation(xp, info, tcr, old_valence);
1056  } else if (xnum == SIGFPE) {
1057    return handle_sigfpe(xp, tcr);
1058  } else if ((xnum == SIGILL)) {
1059    if (IS_GC_TRAP(instruction)) {
1060      return handle_gc_trap(xp, tcr);
1061    } else if (IS_UUO(instruction)) {
1062      return handle_uuo(xp, info, instruction);
1063    } else {
1064      return handle_unimplemented_instruction(xp,instruction,tcr);
1065    }
1066  } else if (xnum == SIGNAL_FOR_PROCESS_INTERRUPT) {
1067    tcr->interrupt_pending = 0;
1068    callback_for_trap(nrs_CMAIN.vcell, xp, 0, 0, NULL);
1069    return true;
1070  }
1071
1072  return false;
1073}
1074
/* Advance the saved PC by "delta" BYTES.  The PC is typed as a pointer
   to 32-bit opcodes, so the byte delta is scaled down by 4 before the
   pointer arithmetic. */
void
adjust_exception_pc(ExceptionInformation *xp, int delta)
{
  xpPC(xp) += (delta >> 2);
}
1080
1081
1082/*
1083  This wants to scan backwards until "where" points to an instruction
1084   whose major opcode is either 63 (double-float) or 59 (single-float)
1085*/
1086
/* Stub: per the comment above, this was intended to scan backwards for
   the originating floating-point instruction, but currently does
   nothing and always reports failure (-1). */
OSStatus
handle_fpux_binop(ExceptionInformation *xp, pc where)
{
  OSStatus err = -1;
  opcode *there = (opcode *) where, instr, errnum = 0;
  return err;
}
1094
/*
  Handle a UUO (software trap instruction).  The UUO's format field
  selects the handling: kernel thread services (suspend/resume/kill/
  interrupt/allocate-list), TLB extension, debugger entry, process
  interrupts, or the family of error-signalling formats.  On success
  the PC is advanced by "bump" bytes (some cases adjust it themselves
  and set bump to 0).  Returns true if the UUO was handled.
*/
Boolean
handle_uuo(ExceptionInformation *xp, siginfo_t *info, opcode the_uuo) 
{
  unsigned 
    format = UUO_FORMAT(the_uuo);
  Boolean handled = false;
  int bump = 4;
  TCR *tcr = get_tcr(true);

  switch (format) {
  case uuo_format_kernel_service:
    {
      /* Thread-management services; the target TCR is in arg_z and
         the service code in the UUO's unary field. */
      TCR * target = (TCR *)xpGPR(xp,arg_z);
      int service = UUO_UNARY_field(the_uuo);

      switch (service) {
      case error_propagate_suspend:
        handled = true;
        break;
      case error_interrupt:
        xpGPR(xp,imm0) = (LispObj) raise_thread_interrupt(target);
        handled = true;
        break;
      case error_suspend:
        xpGPR(xp,imm0) = (LispObj) lisp_suspend_tcr(target);
        handled = true;
        break;
      case error_suspend_all:
        lisp_suspend_other_threads();
        handled = true;
        break;
      case error_resume:
        xpGPR(xp,imm0) = (LispObj) lisp_resume_tcr(target);
        handled = true;
        break;
      case error_resume_all:
        lisp_resume_other_threads();
        handled = true;
        break;
      case error_kill:
        xpGPR(xp,imm0) = (LispObj)kill_tcr(target);
        handled = true;
        break;
      case error_allocate_list:
        allocate_list(xp,tcr);
        handled = true;
        break;
      default:
        handled = false;
        break;
      }
      break;
    }

  case uuo_format_unary:
    switch(UUO_UNARY_field(the_uuo)) {
    case 3:
      /* Extend the TCR's TLB to cover the index in the UUOA field. */
      if (extend_tcr_tlb(tcr,xp,UUOA_field(the_uuo))) {
        handled = true;
        bump = 4;
        break;
      }
      /* fall in */
    default:
      handled = false;
      break;

    }
    break;

  case uuo_format_nullary:
    switch (UUOA_field(the_uuo)) {
    case 3:
      /* Breakpoint: advance past the UUO first, then enter the
         kernel debugger. */
      adjust_exception_pc(xp, bump);
      bump = 0;
      lisp_Debugger(xp, info, debug_entry_dbg, false, "Lisp Breakpoint");
      handled = true;
      break;

    case 4:
      /* Deferred process interrupt: call back into lisp. */
      tcr->interrupt_pending = 0;
      callback_for_trap(nrs_CMAIN.vcell, xp, 0, 0, NULL);
      handled = true;
      break;
    default:
      handled = false;
      break;
    }
    break;


  case uuo_format_error_lisptag:
  case uuo_format_error_fulltag:
  case uuo_format_error_xtype:
  case uuo_format_cerror_lisptag:
  case uuo_format_cerror_fulltag:
  case uuo_format_cerror_xtype:
  case uuo_format_nullary_error:
  case uuo_format_unary_error:
  case uuo_format_binary_error:
  case uuo_format_ternary:
  case uuo_format_ternary2:
    /* All error-signalling formats funnel through handle_error,
       which may adjust bump. */
    handled = handle_error(xp,0,the_uuo, &bump);
    break;

  default:
    handled = false;
    bump = 0;
  }
 
  if (handled && bump) {
    adjust_exception_pc(xp, bump);
  }
  return handled;
}
1210
1211natural
1212register_codevector_contains_pc (natural lisp_function, pc where)
1213{
1214  natural code_vector, size;
1215
1216  if ((fulltag_of(lisp_function) == fulltag_misc) &&
1217      (header_subtag(header_of(lisp_function)) == subtag_function)) {
1218    code_vector = deref(lisp_function, 2);
1219    size = header_element_count(header_of(code_vector)) << 2;
1220    if ((untag(code_vector) < (natural)where) && 
1221        ((natural)where < (code_vector + size)))
1222      return(code_vector);
1223  }
1224
1225  return(0);
1226}
1227
1228Boolean
1229callback_for_trap (LispObj callback_macptr, ExceptionInformation *xp, natural info,natural arg, int *bumpP)
1230{
1231  return callback_to_lisp(callback_macptr, xp, info,arg, bumpP);
1232}
1233
/* Call back into lisp through the macptr "callback_macptr" (e.g. the
   value of %ERR-DISP or CMAIN), passing the exception context, two
   arguments, and the register/offset identifying where in lisp code
   the exception occurred.  The global exception lock is released
   around the callback.  If bumpP is non-NULL, the callback's return
   value (a PC adjustment in bytes) is stored through it. */
Boolean
callback_to_lisp (LispObj callback_macptr, ExceptionInformation *xp,
                  natural arg1, natural arg2, int *bumpP)
{
  natural  callback_ptr;
  area *a;
  natural fnreg = Rfn,  codevector, offset;
  pc where = xpPC(xp);
  int delta;

  /* Identify which function register (Rfn, then nfn) holds a function
     whose code vector contains the PC; fnreg = 0 if neither does. */
  codevector = register_codevector_contains_pc(xpGPR(xp,fnreg), where);
  if (codevector == 0) {
    fnreg = nfn;
    codevector = register_codevector_contains_pc(xpGPR(xp,fnreg), where);
    if (codevector == 0) {
      fnreg = 0;
    }
  }
  /* Report the PC as an offset into the code vector when we found one,
     otherwise as an absolute address. */
  if (codevector) {
    offset = (natural)where - (codevector - (fulltag_misc-node_size));
  } else {
    offset = (natural)where;
  }
                                                 
                                               

  TCR *tcr = get_tcr(true);

  /* Put the active stack pointer where .SPcallback expects it */
  a = tcr->cs_area;
  a->active = (BytePtr) ptr_from_lispobj(xpGPR(xp, Rsp));

  /* Copy globals from the exception frame to tcr */
  tcr->save_allocptr = (void *)ptr_from_lispobj(xpGPR(xp, allocptr));
  tcr->save_vsp = (LispObj*) ptr_from_lispobj(xpGPR(xp, vsp));



  /* Call back.
     Lisp will handle trampolining through some code that
     will push lr/fn & pc/nfn stack frames for backtrace.
  */
  callback_ptr = ((macptr *)ptr_from_lispobj(untag(callback_macptr)))->address;
  UNLOCK(lisp_global(EXCEPTION_LOCK), tcr);
  delta = ((int (*)())callback_ptr) (xp, arg1, arg2, fnreg, offset);
  LOCK(lisp_global(EXCEPTION_LOCK), tcr);

  if (bumpP) {
    *bumpP = delta;
  }

  /* Copy GC registers back into exception frame */
  xpGPR(xp, allocptr) = (LispObj) ptr_to_lispobj(tcr->save_allocptr);
  return true;
}
1289
1290area *
1291allocate_no_stack (natural size)
1292{
1293#ifdef SUPPORT_PRAGMA_UNUSED
1294#pragma unused(size)
1295#endif
1296
1297  return (area *) NULL;
1298}
1299
1300
1301
1302
1303
1304
1305/* callback to (symbol-value cmain) if it is a macptr,
1306   otherwise report cause and function name to console.
1307   Returns noErr if exception handled OK */
1308OSStatus
1309handle_trap(ExceptionInformation *xp, opcode the_trap, pc where, siginfo_t *info)
1310{
1311  LispObj   cmain = nrs_CMAIN.vcell;
1312  TCR *tcr = TCR_FROM_TSD(xpGPR(xp, rcontext));
1313
1314}
1315
1316
1317
1318
/* Report a recoverable problem on the kernel debug stream and
   continue; the message is flushed immediately so it isn't lost
   if the lisp dies shortly afterwards. */
void non_fatal_error( char *msg )
{
  fprintf( dbgout, "Non-fatal error: %s.\n", msg );
  fflush( dbgout );
}
1324
1325
1326
1327Boolean
1328handle_error(ExceptionInformation *xp, unsigned arg1, unsigned arg2, int *bumpP)
1329{
1330  LispObj   errdisp = nrs_ERRDISP.vcell;
1331
1332  if ((fulltag_of(errdisp) == fulltag_misc) &&
1333      (header_subtag(header_of(errdisp)) == subtag_macptr)) {
1334    /* errdisp is a macptr, we can call back to lisp */
1335    return callback_for_trap(errdisp, xp, arg1, arg2, bumpP);
1336    }
1337
1338  return false;
1339}
1340               
1341
1342/*
1343   Current thread has all signals masked.  Before unmasking them,
1344   make it appear that the current thread has been suspended.
1345   (This is to handle the case where another thread is trying
1346   to GC before this thread is able to sieze the exception lock.)
1347*/
int
prepare_to_wait_for_exception_lock(TCR *tcr, ExceptionInformation *context)
{
  int old_valence = tcr->valence;

  /* Publish the exception context before changing valence: a thread
     that observes TCR_STATE_EXCEPTION_WAIT will treat
     pending_exception_context as this thread's suspend context. */
  tcr->pending_exception_context = context;
  tcr->valence = TCR_STATE_EXCEPTION_WAIT;

  ALLOW_EXCEPTIONS(context);    /* unmask signals in the resumed context */
  return old_valence;           /* caller restores this via exit_signal_handler */
} 
1359
/* Acquire the global exception lock (this may block while another
   thread GCs), then push "context" onto the TCR's chain of exception
   frames so the GC can find and update this context, and mark the
   thread as foreign while it runs handler code. */
void
wait_for_exception_lock_in_handler(TCR *tcr, 
                                   ExceptionInformation *context,
                                   xframe_list *xf)
{

  LOCK(lisp_global(EXCEPTION_LOCK), tcr);
  xf->curr = context;
  xf->prev = tcr->xframe;
  tcr->xframe =  xf;
  tcr->pending_exception_context = NULL;
  tcr->valence = TCR_STATE_FOREIGN; 
}
1373
/* Inverse of wait_for_exception_lock_in_handler: pop the exception
   frame pushed there (restoring pending_exception_context from it),
   mark the thread as returning from the exception, and release the
   global exception lock. */
void
unlock_exception_lock_in_handler(TCR *tcr)
{
  tcr->pending_exception_context = tcr->xframe->curr;
  tcr->xframe = tcr->xframe->prev;
  tcr->valence = TCR_STATE_EXCEPTION_RETURN;
  UNLOCK(lisp_global(EXCEPTION_LOCK),tcr);
}
1382
1383/*
1384   If an interrupt is pending on exception exit, try to ensure
1385   that the thread sees it as soon as it's able to run.
1386*/
1387void
1388raise_pending_interrupt(TCR *tcr)
1389{
1390  if (TCR_INTERRUPT_LEVEL(tcr) > 0) {
1391    pthread_kill((pthread_t)ptr_from_lispobj(tcr->osid), SIGNAL_FOR_PROCESS_INTERRUPT);
1392  }
1393}
1394
/* Block all signals, then restore the TCR state that was saved on
   entry to the signal handler.  (On Android, pthread_sigmask is a
   macro over rt_sigprocmask with an 8-byte mask -- see lisp.h -- so
   a fully-set mask is faked with two -1 words.) */
void
exit_signal_handler(TCR *tcr, int old_valence, natural old_last_lisp_frame)
{
#ifndef ANDROID
  sigset_t mask;
  sigfillset(&mask);
#else
  int mask [] = {-1,-1};
#endif
 
  pthread_sigmask(SIG_SETMASK,(sigset_t *)&mask, NULL);
  tcr->valence = old_valence;
  tcr->pending_exception_context = NULL;
  tcr->last_lisp_frame = old_last_lisp_frame;
}
1410
1411
/* Common synchronous-exception handler.  On Darwin the caller (the
   Mach exception glue) has already computed the TCR, old valence and
   old last_lisp_frame and passes them in; elsewhere they're computed
   here.  Acquires the exception lock, lets handle_exception() try to
   deal with the signal, and drops into the kernel debugger if it
   can't. */
void
signal_handler(int signum, siginfo_t *info, ExceptionInformation  *context
#ifdef DARWIN
, TCR *tcr, int old_valence, natural old_last_lisp_frame
#endif
)
{
  xframe_list xframe_link;
#ifndef DARWIN
   
    TCR *tcr = (TCR *) get_interrupt_tcr(false);
 
    /* The signal handler's entered with all signals (notably the
       thread_suspend signal) blocked.  Don't allow any other signals
       (notably the thread_suspend signal) to preempt us until we've
       set the TCR's xframe slot to include the current exception
       context.
    */
   
    natural  old_last_lisp_frame = tcr->last_lisp_frame;
    int old_valence;

    tcr->last_lisp_frame = xpGPR(context,Rsp);
    old_valence = prepare_to_wait_for_exception_lock(tcr, context);
#endif

  /* A suspend request arrived while signals were blocked: deliver it
     to ourselves now so it's handled at a safe point. */
  if (tcr->flags & (1<<TCR_FLAG_BIT_PENDING_SUSPEND)) {
    CLR_TCR_FLAG(tcr, TCR_FLAG_BIT_PENDING_SUSPEND);
    pthread_kill(pthread_self(), thread_suspend_signal);
  }

 
  wait_for_exception_lock_in_handler(tcr, context, &xframe_link);
  if ((!handle_exception(signum, context, tcr, info, old_valence))) {
    /* Unhandled: describe the exception and enter the kernel debugger;
       if it says to, propagate the exception to the OS. */
    char msg[512];
    snprintf(msg, sizeof(msg), "Unhandled exception %d at 0x%lx, context->regs at #x%lx", signum, xpPC(context), (natural)xpGPRvector(context));
    if (lisp_Debugger(context, info, signum, (old_valence != TCR_STATE_LISP), msg)) {
      SET_TCR_FLAG(tcr,TCR_FLAG_BIT_PROPAGATE_EXCEPTION);
    }
  }
  unlock_exception_lock_in_handler(tcr);

  /* This thread now looks like a thread that was suspended while
     executing lisp code.  If some other thread gets the exception
     lock and GCs, the context (this thread's suspend_context) will
     be updated.  (That's only of concern if it happens before we
     can return to the kernel/to the Mach exception handler).
  */
  exit_signal_handler(tcr, old_valence, old_last_lisp_frame);
  raise_pending_interrupt(tcr);
}
1463
[14517]1464
/* SIGILL handler.  UUOs encode a condition in their top bits
   (bits 31:29 select which PSR flag combination to test, bit 28
   inverts the sense).  If the faulting instruction is a UUO whose
   condition is NOT satisfied, it's a no-op: just skip it.  Otherwise
   fall through to the common signal_handler, which will dispatch it
   (see handle_uuo). */
void
sigill_handler(int signum, siginfo_t *info, ExceptionInformation  *xp)
{
  pc program_counter = xpPC(xp);
  opcode instr = *program_counter;

  if (IS_UUO(instr)) {
    natural psr = xpPSR(xp);
    Boolean opcode_matched_condition = false,
      flip = ((instr & (1<<28)) != 0);   /* bit 28: negate the test */
   

    switch (instr >> 29) {
    case 0:                     /* Z set (EQ-style) */
      opcode_matched_condition = ((psr & PSR_Z_MASK) != 0);
      break;
    case 1:                     /* C set */
      opcode_matched_condition = ((psr & PSR_C_MASK) != 0);
      break;
    case 2:                     /* N set */
      opcode_matched_condition = ((psr & PSR_N_MASK) != 0);
      break;
    case 3:                     /* V set */
      opcode_matched_condition = ((psr & PSR_V_MASK) != 0);
      break;
    case 4:                     /* C set and Z clear (unsigned higher) */
      opcode_matched_condition = (((psr & PSR_C_MASK) != 0) &&
                                  ((psr & PSR_Z_MASK) == 0));
      break;
    case 5:                     /* N == V (signed >=) */
      opcode_matched_condition = (((psr & PSR_N_MASK) != 0) ==
                                  ((psr & PSR_V_MASK) != 0));
      break;
    case 6:                     /* N == V and Z clear (signed >) */
      opcode_matched_condition = ((((psr & PSR_N_MASK) != 0) ==
                                   ((psr & PSR_V_MASK) != 0)) &&
                                  ((psr & PSR_Z_MASK) == 0));
      break;
    case 7:                     /* always */
      opcode_matched_condition = true;
      flip = false;
      break;
    }
    if (flip) {
      opcode_matched_condition = !opcode_matched_condition;
    }
    if (!opcode_matched_condition) {
      /* Condition false: the UUO is a no-op; step over it. */
      adjust_exception_pc(xp,4);
      return;
    }
  }
  signal_handler(signum,info,xp);
}
1518
1519
[14119]1520#ifdef USE_SIGALTSTACK
/* Re-invoke a signal handler on the thread's main (lisp control)
   stack rather than the sigaltstack: copy the ucontext and siginfo
   just below the interrupted Rsp, then tail into the assembly thunk
   call_handler_on_main_stack, which switches stacks and calls
   "handler" with the copies. */
void
invoke_handler_on_main_stack(int signo, siginfo_t *info, ExceptionInformation *xp, void *return_address, void *handler)
{
  ExceptionInformation *xp_copy;
  siginfo_t *info_copy;
  extern void call_handler_on_main_stack(int, siginfo_t *, ExceptionInformation *,void *, void *);
 
  BytePtr target_sp= (BytePtr)xpGPR(xp,Rsp);
  target_sp -= sizeof(ucontext_t);
  xp_copy = (ExceptionInformation *)target_sp;
  memmove(target_sp,xp,sizeof(*xp));
  /* The copy no longer lives on the alternate stack; clear the
     stack/link fields so it doesn't claim to. */
  xp_copy->uc_stack.ss_sp = 0;
  xp_copy->uc_stack.ss_size = 0;
  xp_copy->uc_stack.ss_flags = 0;
  xp_copy->uc_link = NULL;
  target_sp -= sizeof(siginfo_t);
  info_copy = (siginfo_t *)target_sp;
  memmove(target_sp,info,sizeof(*info));
  call_handler_on_main_stack(signo, info_copy, xp_copy, return_address, handler);
}
1541 
/* First-stage SIGSEGV/SIGBUS handler, running on the sigaltstack.
   Deals with control-stack growth/overflow itself, then forwards
   everything else to signal_handler on the main stack.
   - SIGBUS exactly at the stack area's soft limit page: map a fresh
     page there and resume.
   - SIGSEGV within [low, softlimit): hard overflow (below hardlimit)
     is fatal; otherwise unprotect the guard region so the overflow
     can be signalled in lisp. */
void
altstack_signal_handler(int signo, siginfo_t *info, ExceptionInformation *xp)
{
  TCR *tcr=get_tcr(true);
 
  if (signo == SIGBUS) {
    BytePtr addr = (BytePtr)(xp->uc_mcontext.fault_address); 
    area *a = tcr->cs_area;
    if (((BytePtr)truncate_to_power_of_2(addr,log2_page_size))== a->softlimit) 
{
      if (mmap(a->softlimit,
               page_size,
               PROT_READ|PROT_WRITE|PROT_EXEC,
               MAP_PRIVATE|MAP_ANON|MAP_FIXED,
               -1,
               0) == a->softlimit) {
        return;
      }
    }
  } else if (signo == SIGSEGV) {
    BytePtr addr = (BytePtr)(xp->uc_mcontext.fault_address);
    area *a = tcr->cs_area;
   
    if ((addr >= a->low) &&
        (addr < a->softlimit)) {
      if (addr < a->hardlimit) {
        Bug(xp, "hard stack overflow");
      } else {
        UnProtectMemory(a->hardlimit,a->softlimit-a->hardlimit);
      }
    }
  }
  invoke_handler_on_main_stack(signo, info, xp, __builtin_return_address(0), signal_handler);
}
1576#endif
1577
1578/*
1579  If it looks like we're in the middle of an atomic operation, make
1580  it seem as if that operation is either complete or hasn't started
1581  yet.
1582
1583  The cases handled include:
1584
1585  a) storing into a newly-allocated lisp frame on the stack.
1586  b) marking a newly-allocated TSP frame as containing "raw" data.
1587  c) consing: the GC has its own ideas about how this should be
1588     handled, but other callers would be best advised to back
1589     up or move forward, according to whether we're in the middle
1590     of allocating a cons cell or allocating a uvector.
1591  d) a STMW to the vsp
1592  e) EGC write-barrier subprims.
1593*/
1594
1595extern opcode
1596  egc_write_barrier_start,
1597  egc_write_barrier_end, 
1598  egc_store_node_conditional, 
1599  egc_store_node_conditional_test,
[15755]1600  egc_set_hash_key, egc_set_hash_key_did_store,
1601  egc_gvset, egc_gvset_did_store,
1602  egc_rplaca, egc_rplaca_did_store,
1603  egc_rplacd, egc_rplacd_did_store,
[14119]1604  egc_set_hash_key_conditional,
[14791]1605  egc_set_hash_key_conditional_test,
1606  swap_lr_lisp_frame_temp0,
1607  swap_lr_lisp_frame_arg_z;
[14119]1608
1609
1610extern opcode ffcall_return_window, ffcall_return_window_end;
1611
/* "Luser" (adjust) the PC and registers of a suspended/interrupted
   thread's context "xp" so that any atomic sequence it was in the
   middle of -- an EGC write barrier, an allocation, or an lr/frame
   swap -- appears either fully complete or not yet started (see the
   block comment above for the cases).  If alloc_disp is non-NULL and
   the thread was mid-allocation, the allocptr displacement is stored
   through it so the caller can redo the adjustment on resume. */
void
pc_luser_xp(ExceptionInformation *xp, TCR *tcr, signed_natural *alloc_disp)
{
  pc program_counter = xpPC(xp);
  opcode instr = *program_counter;
  lisp_frame *frame = (lisp_frame *)ptr_from_lispobj(xpGPR(xp,Rsp)); /* (unused) */
  LispObj cur_allocptr = xpGPR(xp, allocptr);
  int allocptr_tag = fulltag_of(cur_allocptr);
 

  /* Case: interrupted inside an EGC write-barrier subprim.  Either
     back out (return with PC untouched, before the store happened)
     or complete the memoization and return to the caller via lr. */
  if ((program_counter < &egc_write_barrier_end) && 
      (program_counter >= &egc_write_barrier_start)) {
    LispObj *ea = 0, val = 0, root = 0;
    bitvector refbits = (bitvector)(lisp_global(REFBITS));
    Boolean need_check_memo = true, need_memoize_root = false;

    if (program_counter >= &egc_set_hash_key_conditional) {
      if ((program_counter < &egc_set_hash_key_conditional_test) ||
          ((program_counter == &egc_set_hash_key_conditional_test) &&
           (! (xpPSR(xp) & PSR_Z_MASK)))) {
        /* Conditional store not yet attempted, or failed (Z clear). */
        return;
      }
      root = xpGPR(xp,arg_x);
      ea = (LispObj *) (root+xpGPR(xp,arg_y)+misc_data_offset);
      need_memoize_root = true;
    } else if (program_counter >= &egc_store_node_conditional) {
      if ((program_counter < &egc_store_node_conditional_test) ||
          ((program_counter == &egc_store_node_conditional_test) &&
           (! (xpPSR(xp) & PSR_Z_MASK)))) {
        /* The conditional store either hasn't been attempted yet, or
           has failed.  No need to adjust the PC, or do memoization. */
        return;
      }
      ea = (LispObj*)(xpGPR(xp,arg_x) + xpGPR(xp,imm0));
      xpGPR(xp,arg_z) = t_value;  /* store succeeded: subprim returns T */
    } else if (program_counter >= &egc_set_hash_key) {
      if (program_counter < &egc_set_hash_key_did_store) {
        return;
      }
      root = xpGPR(xp,arg_x);
      val = xpGPR(xp,arg_z);
      ea = (LispObj *) (root+xpGPR(xp,arg_y)+misc_data_offset);
      need_memoize_root = true;
    } else if (program_counter >= &egc_gvset) {
      if (program_counter < &egc_gvset_did_store) {
        return;
      } 
      ea = (LispObj *) (xpGPR(xp,arg_x)+xpGPR(xp,arg_y)+misc_data_offset);
      val = xpGPR(xp,arg_z);
    } else if (program_counter >= &egc_rplacd) {
      if (program_counter < &egc_rplacd_did_store) {
        return;
      } 
      ea = (LispObj *) untag(xpGPR(xp,arg_y));
      val = xpGPR(xp,arg_z);
    } else {                      /* egc_rplaca */
      if (program_counter < &egc_rplaca_did_store) {
        return;
      } 
      ea =  ((LispObj *) untag(xpGPR(xp,arg_y)))+1;
      val = xpGPR(xp,arg_z);
    }
    /* The store happened: set the refbit(s) the subprim would have
       set (only needed for old->new references). */
    if (need_check_memo) {
      natural  bitnumber = area_dnode(ea, lisp_global(REF_BASE));
      if ((bitnumber < lisp_global(OLDSPACE_DNODE_COUNT)) &&
          ((LispObj)ea < val)) {
        atomic_set_bit(refbits, bitnumber);
        if (need_memoize_root) {
          bitnumber = area_dnode(root, lisp_global(REF_BASE));
          atomic_set_bit(refbits, bitnumber);
        }
      }
    }
    /* Pretend the subprim returned. */
    xpPC(xp) = xpLR(xp);
    return;
  }


 
  /* Case: allocptr is tagged, so the thread is somewhere in an
     allocation sequence. */
  if (allocptr_tag != tag_fixnum) {
    signed_natural disp = allocptr_displacement(xp);

    if (disp) {
      /* Being architecturally "at" the alloc trap doesn't tell
         us much (in particular, it doesn't tell us whether
         or not the thread has committed to taking the trap
         and is waiting for the exception lock (or waiting
         for the Mach exception thread to tell it how bad
         things are) or is about to execute a conditional
         trap.
         Regardless of which case applies, we want the
         other thread to take (or finish taking) the
         trap, and we don't want it to consider its
         current allocptr to be valid.
         The difference between this case (suspend other
         thread for GC) and the previous case (suspend
         current thread for interrupt) is solely a
         matter of what happens after we leave this
         function: some non-current thread will stay
         suspended until the GC finishes, then take
         (or start processing) the alloc trap.   The
         current thread will go off and do PROCESS-INTERRUPT
         or something, and may return from the interrupt
         and need to finish the allocation that got interrupted.
      */

      if (alloc_disp) {
        *alloc_disp = disp;
        xpGPR(xp,allocptr) -= disp;
        /* Leave the PC at the alloc trap.  When the interrupt
           handler returns, it'll decrement allocptr by disp
           and the trap may or may not be taken.
        */
      } else {
        Boolean ok = false;
        update_bytes_allocated(tcr, (void *) ptr_from_lispobj(cur_allocptr - disp));
        xpGPR(xp, allocptr) = VOID_ALLOCPTR + disp;
        instr = program_counter[-1];
        if (IS_BRANCH_AROUND_ALLOC_TRAP(instr)) {
          instr = program_counter[-2];
          if (IS_COMPARE_ALLOCPTR_TO_RM(instr)){
            xpGPR(xp,RM_field(instr)) = VOID_ALLOCPTR;
            ok = true;
          }
        }
        if (ok) {
          /* Clear the carry bit, so that the trap will be taken. */
          xpPSR(xp) &= ~PSR_C_MASK;
        } else {
          Bug(NULL, "unexpected instruction preceding alloc trap.");
        }
      }
    } else {
      /* we may be before or after the alloc trap.  If before, set
         allocptr to VOID_ALLOCPTR and back up to the start of the
         instruction sequence; if after, finish the allocation. */
      Boolean before_alloc_trap = false;

      /* Walk backwards over the recognizable instructions of the
         allocation prologue. */
      if (IS_BRANCH_AROUND_ALLOC_TRAP(instr)) {
        before_alloc_trap = true;
        --program_counter;
        instr = *program_counter;
      }
      if (IS_COMPARE_ALLOCPTR_TO_RM(instr)) {
        before_alloc_trap = true;
        --program_counter;
        instr = *program_counter;
      }
      if (IS_LOAD_RD_FROM_ALLOCBASE(instr)) {
        before_alloc_trap = true;
        --program_counter;
        instr = *program_counter;
      }
      if (IS_SUB_HI_FROM_ALLOCPTR(instr)) {
        before_alloc_trap = true;
        --program_counter;
      }
      if (before_alloc_trap) {
        xpPC(xp) = program_counter;
        xpGPR(xp,allocptr) = VOID_ALLOCPTR;
      } else {
        /* If we're already past the alloc_trap, finish allocating
           the object. */
        if (allocptr_tag == fulltag_cons) {
          finish_allocating_cons(xp);
        } else {
          if (allocptr_tag == fulltag_misc) {
            finish_allocating_uvector(xp);
          } else {
            Bug(xp, "what's being allocated here ?");
          }
        }
        /* Whatever we finished allocating, reset allocptr/allocbase to
           VOID_ALLOCPTR */
        xpGPR(xp,allocptr) = VOID_ALLOCPTR;
      }
      return;
    }
    return;
  }
  /* Case: interrupted inside one of the 3-instruction
     swap_lr_lisp_frame sequences; complete the swap by hand and
     move the PC past the sequence. */
  {
    lisp_frame *swap_frame = NULL;
    pc base = &swap_lr_lisp_frame_temp0;
   
    if ((program_counter >base)             /* sic */
        && (program_counter < (base+3))) {
      swap_frame = (lisp_frame *)xpGPR(xp,temp0);
    } else {
      base = &swap_lr_lisp_frame_arg_z;
      if ((program_counter > base) && (program_counter < (base+3))) { 
        swap_frame = (lisp_frame *)xpGPR(xp,arg_z);
      }
    }
    if (swap_frame) {
      if (program_counter == (base+1)) {
        /* savelr not stored yet; do it now. */
        swap_frame->savelr = xpGPR(xp,Rlr);
      }
      xpGPR(xp,Rlr) = xpGPR(xp,imm0);
      xpPC(xp) = base+3;
      return;
    }
  }
}
1816
/* Handler for SIGNAL_FOR_PROCESS_INTERRUPT.  If the thread can't be
   interrupted right now (interrupts disabled, running foreign code,
   or unwinding), just record the interrupt as pending; otherwise
   nudge the context out of any atomic sequence (pc_luser_xp), take
   the exception lock, and dispatch through handle_exception. */
void
interrupt_handler (int signum, siginfo_t *info, ExceptionInformation *context)
{
  TCR *tcr = get_interrupt_tcr(false);
  if (tcr) {
    if (TCR_INTERRUPT_LEVEL(tcr) < 0) {
      /* Interrupts disabled: defer. */
      tcr->interrupt_pending = 1 << fixnumshift;
    } else {
      LispObj cmain = nrs_CMAIN.vcell;

      if ((fulltag_of(cmain) == fulltag_misc) &&
          (header_subtag(header_of(cmain)) == subtag_macptr)) {
        /*
           This thread can (allegedly) take an interrupt now.
           It's tricky to do that if we're executing
           foreign code (especially Linuxthreads code, much
           of which isn't reentrant.)
           If we're unwinding the stack, we also want to defer
           the interrupt.
        */
        if ((tcr->valence != TCR_STATE_LISP) ||
            (tcr->unwinding != 0)) {
          tcr->interrupt_pending = 1 << fixnumshift;
        } else {
          xframe_list xframe_link;
          int old_valence;
          signed_natural disp=0;
          natural old_last_lisp_frame = tcr->last_lisp_frame;
         
          tcr->last_lisp_frame = xpGPR(context,Rsp);
          /* Finish/back out any atomic sequence; disp records an
             in-progress allocation's displacement. */
          pc_luser_xp(context, tcr, &disp);
          old_valence = prepare_to_wait_for_exception_lock(tcr, context);
          wait_for_exception_lock_in_handler(tcr, context, &xframe_link);
          handle_exception(signum, context, tcr, info, old_valence);
          if (disp) {
            /* Re-apply the allocation displacement removed by pc_luser_xp. */
            xpGPR(context,allocptr) -= disp;
          }
          unlock_exception_lock_in_handler(tcr);
          exit_signal_handler(tcr, old_valence, old_last_lisp_frame);
        }
      }
    }
  }
#ifdef DARWIN
    DarwinSigReturn(context);
#endif
}
1864
1865#ifdef USE_SIGALTSTACK
/* sigaltstack trampoline: run interrupt_handler on the main stack. */
void
altstack_interrupt_handler(int signum, siginfo_t *info, ExceptionInformation *context)
{
  invoke_handler_on_main_stack(signum, info, context, __builtin_return_address(0),interrupt_handler);
}
1871#endif
1872
1873
1874void
[14873]1875install_signal_handler(int signo, void *handler, unsigned flags)
[14119]1876{
1877  struct sigaction sa;
[14873]1878  int err;
[14549]1879
1880  sigfillset(&sa.sa_mask);
[14119]1881 
1882  sa.sa_sigaction = (void *)handler;
1883  sigfillset(&sa.sa_mask);
[14873]1884  sa.sa_flags = SA_SIGINFO;
1885
1886#ifdef ANDROID
[15025]1887  sa.sa_flags |= SA_NODEFER;
[14873]1888#endif
[14119]1889#ifdef USE_SIGALTSTACK
[14873]1890  if (flags & ON_ALTSTACK)
1891    sa.sa_flags |= SA_ONSTACK;
[14119]1892#endif
[14873]1893  if (flags & RESTART_SYSCALLS)
1894    sa.sa_flags |= SA_RESTART;
1895  if (flags & RESERVE_FOR_LISP) {
1896    extern sigset_t user_signals_reserved;
1897    sigaddset(&user_signals_reserved, signo);
1898  }
[14119]1899
[14873]1900  err = sigaction(signo, &sa, NULL);
1901  if (err) {
1902    perror("sigaction");
1903    exit(1);
1904  }
[14119]1905}
1906
1907
/* Install the kernel's POSIX signal handlers: SIGILL for UUOs,
   SIGSEGV/SIGBUS on the alternate stack (stack overflow handling --
   see altstack_signal_handler), the process-interrupt signal, and
   ignore SIGPIPE. */
void
install_pmcl_exception_handlers()
{
  install_signal_handler(SIGILL, (void *)sigill_handler, RESERVE_FOR_LISP);
  install_signal_handler(SIGSEGV, (void *)ALTSTACK(signal_handler),
                         RESERVE_FOR_LISP|ON_ALTSTACK);
  install_signal_handler(SIGBUS, (void *)ALTSTACK(signal_handler),
                           RESERVE_FOR_LISP|ON_ALTSTACK);
  install_signal_handler(SIGNAL_FOR_PROCESS_INTERRUPT,
                         (void *)interrupt_handler, RESERVE_FOR_LISP);
  signal(SIGPIPE, SIG_IGN);
}
1920
[15093]1921
[14119]1922#ifdef USE_SIGALTSTACK
1923void
1924setup_sigaltstack(area *a)
1925{
1926  stack_t stack;
1927#if 0
1928  stack.ss_sp = a->low;
1929  a->low += SIGSTKSZ*8;
1930#endif
1931  stack.ss_size = SIGSTKSZ*8;
1932  stack.ss_flags = 0;
1933  stack.ss_sp = mmap(NULL,stack.ss_size, PROT_READ|PROT_WRITE|PROT_EXEC,MAP_ANON|MAP_PRIVATE,-1,0);
1934  if (sigaltstack(&stack, NULL) != 0) {
1935    perror("sigaltstack");
1936    exit(-1);
1937  }
1938}
1939#endif
1940
1941void
1942thread_kill_handler(int signum, siginfo_t info, ExceptionInformation *xp)
1943{
1944  TCR *tcr = get_tcr(false);
1945  area *a;
[14549]1946#ifndef ANDROID
[14119]1947  sigset_t mask;
1948 
1949  sigemptyset(&mask);
[14549]1950#else
1951  int mask[] = {0,0};
1952#endif
[14119]1953
1954  if (tcr) {
1955    tcr->valence = TCR_STATE_FOREIGN;
1956    a = tcr->vs_area;
1957    if (a) {
1958      a->active = a->high;
1959    }
1960    a = tcr->cs_area;
1961    if (a) {
1962      a->active = a->high;
1963    }
1964  }
1965 
[14549]1966  pthread_sigmask(SIG_SETMASK,(sigset_t *)&mask,NULL);
[14119]1967  pthread_exit(NULL);
1968}
1969
1970#ifdef USE_SIGALTSTACK
/* sigaltstack trampoline: run thread_kill_handler on the main stack. */
void
altstack_thread_kill_handler(int signo, siginfo_t *info, ExceptionInformation *xp)
{
  invoke_handler_on_main_stack(signo, info, xp, __builtin_return_address(0), thread_kill_handler);
}
1976#endif
1977
/* Choose and install the signals used for inter-thread suspend and
   kill.  Suspend restarts interrupted syscalls so a suspension is
   transparent to foreign code. */
void
thread_signal_setup()
{
  thread_suspend_signal = SIG_SUSPEND_THREAD;
  thread_kill_signal = SIG_KILL_THREAD;

  install_signal_handler(thread_suspend_signal, (void *)suspend_resume_handler,
                         RESERVE_FOR_LISP|RESTART_SYSCALLS);
  install_signal_handler(thread_kill_signal, (void *)thread_kill_handler,
                         RESERVE_FOR_LISP);
}
1989
1990
1991
/* Unprotect every area on the global AllProtectedAreas list,
   detaching the list head before the walk.
   NOTE(review): this reads p->next after unprotect_area(p); that
   assumes unprotect_area() doesn't free the node -- confirm against
   its definition. */
void
unprotect_all_areas()
{
  protected_area_ptr p;

  for(p = AllProtectedAreas, AllProtectedAreas = NULL; p; p = p->next) {
    unprotect_area(p);
  }
}
2001
2002/*
2003  A binding subprim has just done "twlle limit_regno,idx_regno" and
2004  the trap's been taken.  Extend the tcr's tlb so that the index will
2005  be in bounds and the new limit will be on a page boundary, filling
2006  in the new page(s) with 'no_thread_local_binding_marker'.  Update
2007  the tcr fields and the registers in the xp and return true if this
2008  all works, false otherwise.
2009
2010  Note that the tlb was allocated via malloc, so realloc can do some
2011  of the hard work.
2012*/
2013Boolean
2014extend_tcr_tlb(TCR *tcr, 
2015               ExceptionInformation *xp, 
2016               unsigned idx_regno)
2017{
2018  unsigned
2019    index = (unsigned) (xpGPR(xp,idx_regno)),
2020    old_limit = tcr->tlb_limit,
2021    new_limit = align_to_power_of_2(index+1,12),
2022    new_bytes = new_limit-old_limit;
2023  LispObj
2024    *old_tlb = tcr->tlb_pointer,
2025    *new_tlb = realloc(old_tlb, new_limit),
2026    *work;
2027
2028  if (new_tlb == NULL) {
2029    return false;
2030  }
2031 
2032  work = (LispObj *) ((BytePtr)new_tlb+old_limit);
2033
2034  while (new_bytes) {
2035    *work++ = no_thread_local_binding_marker;
2036    new_bytes -= sizeof(LispObj);
2037  }
2038  tcr->tlb_pointer = new_tlb;
2039  tcr->tlb_limit = new_limit;
2040  return true;
2041}
2042
2043
2044
/* One-time exception-subsystem initialization: install the POSIX
   signal handlers. */
void
exception_init()
{
  install_pmcl_exception_handlers();
}
2050
2051
2052
2053
2054
Note: See TracBrowser for help on using the repository browser.