source: trunk/source/lisp-kernel/arm-exceptions.c @ 16341

Last change on this file since 16341 was 16341, checked in by gb, 5 years ago

pc_luser_xp(): if we interrupt a conditional store, detect its success/failure correctly.

_SPeabi_ff_call_simple: store the vsp in tcr.save_vsp when exiting lisp. This seems to fix ticket:1257 in the trunk.

File size: 52.9 KB
RevLine 
[14119]1/*
2   Copyright (C) 2010 Clozure Associates
3   This file is part of Clozure CL. 
4
5   Clozure CL is licensed under the terms of the Lisp Lesser GNU Public
6   License , known as the LLGPL and distributed with Clozure CL as the
7   file "LICENSE".  The LLGPL consists of a preamble and the LGPL,
8   which is distributed with Clozure CL as the file "LGPL".  Where these
9   conflict, the preamble takes precedence. 
10
11   Clozure CL is referenced in the preamble as the "LIBRARY."
12
13   The LLGPL is also available online at
14   http://opensource.franz.com/preamble.html
15*/
16
17#include "lisp.h"
18#include "lisp-exceptions.h"
19#include "lisp_globals.h"
20#include <ctype.h>
21#include <stdio.h>
22#include <stddef.h>
23#include <string.h>
24#include <stdarg.h>
25#include <errno.h>
26#include <stdio.h>
27#ifdef LINUX
28#include <strings.h>
29#include <sys/mman.h>
[14271]30#ifndef ANDROID
[14119]31#include <fpu_control.h>
32#include <linux/prctl.h>
33#endif
[14271]34#endif
[14119]35
36#ifdef DARWIN
37#include <sys/mman.h>
38#ifndef SA_NODEFER
39#define SA_NODEFER 0
40#endif
41#include <sysexits.h>
42
[14549]43
[14119]44/* a distinguished UUO at a distinguished address */
45extern void pseudo_sigreturn(ExceptionInformation *);
46#endif
47
48
[14197]49#include "threads.h"
[14119]50
[14549]51#ifdef ANDROID
52#define pthread_sigmask(how,in,out) rt_sigprocmask(how,in,out,8)
53#endif
[14119]54
55#ifdef LINUX
56
/* Enable trapping floating-point exceptions.  A no-op on Linux/ARM:
   the port does not arm FP traps here (FP conditions are handled,
   if at all, elsewhere). */
void
enable_fp_exceptions()
{
}

/* Disable trapping floating-point exceptions.  Likewise a no-op on
   Linux/ARM; kept so platform-independent code can call it. */
void
disable_fp_exceptions()
{
}
67
68/*
69  Handle exceptions.
70
71*/
72
extern LispObj lisp_nil;

extern natural lisp_heap_gc_threshold;
extern Boolean grow_dynamic_area(natural);

/* Cleared by GC_TRAP_FUNCTION_ALLOCATION_CONTROL to make every heap
   allocation trap into an error; see handle_alloc_trap(). */
Boolean allocation_enabled = true;

/* True after the GC-notification callback has run; reset whenever a
   full GC occurs or the notification threshold is changed. */
Boolean
did_gc_notification_since_last_full_gc = false;




/* Page geometry assumed by this file (4KB pages). */
int
page_size = 4096;

int
log2_page_size = 12;
91
92
93
94
95
96/*
97  If the PC is pointing to an allocation trap, the previous instruction
98  must have decremented allocptr.  Return the non-zero amount by which
99  allocptr was decremented.
100*/
101signed_natural
102allocptr_displacement(ExceptionInformation *xp)
103{
104  pc program_counter = xpPC(xp);
105  opcode instr = *program_counter, prev_instr;
[14515]106  int delta = -3;
[14119]107
108  if (IS_ALLOC_TRAP(instr)) {
109    /* The alloc trap must have been preceded by a cmp and a
110       load from tcr.allocbase. */
[14515]111    if (IS_BRANCH_AROUND_ALLOC_TRAP(program_counter[-1])) {
112      delta = -4;
113    }
114    prev_instr = program_counter[delta];
[14119]115
116    if (IS_SUB_RM_FROM_ALLOCPTR(prev_instr)) {
117      return -((signed_natural)xpGPR(xp,RM_field(prev_instr)));
118    }
119   
120    if (IS_SUB_LO_FROM_ALLOCPTR(prev_instr)) {
121      return -((signed_natural)(prev_instr & 0xff));
122    }
123
124    if (IS_SUB_FROM_ALLOCPTR(prev_instr)) {
125      natural disp = ror(prev_instr&0xff,(prev_instr&0xf00)>>7);
126
[14515]127      instr = program_counter[delta-1];
[14119]128      if (IS_SUB_LO_FROM_ALLOCPTR(instr)) {
129        return -((signed_natural)(disp | (instr & 0xff)));
130      }
131    }
132    Bug(xp, "Can't determine allocation displacement");
133  }
134  return 0;
135}
136
137
138/*
139  A cons cell's been successfully allocated, but the allocptr's
140  still tagged (as fulltag_cons, of course.)  Emulate any instructions
141  that might follow the allocation (stores to the car or cdr, an
142  assignment to the "result" gpr) that take place while the allocptr's
143  tag is non-zero, advancing over each such instruction.  When we're
144  done, the cons cell will be allocated and initialized, the result
145  register will point to it, the allocptr will be untagged, and
146  the PC will point past the instruction that clears the allocptr's
147  tag.
148*/
149void
150finish_allocating_cons(ExceptionInformation *xp)
151{
152  pc program_counter = xpPC(xp);
153  opcode instr;
154  LispObj cur_allocptr = xpGPR(xp, allocptr);
155  cons *c = (cons *)ptr_from_lispobj(untag(cur_allocptr));
156  int target_reg;
157
158  while (1) {
159    instr = *program_counter++;
160
161    if (IS_CLR_ALLOCPTR_TAG(instr)) {
162      xpGPR(xp, allocptr) = untag(cur_allocptr);
163      xpPC(xp) = program_counter;
164      return;
165    } else if (IS_SET_ALLOCPTR_CAR_RD(instr)) {
166      c->car = xpGPR(xp,RD_field(instr));
167    } else if (IS_SET_ALLOCPTR_CDR_RD(instr)) {
168      c->cdr = xpGPR(xp,RD_field(instr));
169    } else {
170      /* assert(IS_SET_ALLOCPTR_RESULT_RD(instr)) */
171      xpGPR(xp,RD_field(instr)) = cur_allocptr;
172    }
173  }
174}
175
176/*
177  We were interrupted in the process of allocating a uvector; we
178  survived the allocation trap, and allocptr is tagged as fulltag_misc.
179  Emulate any instructions which store a header into the uvector,
180  assign the value of allocptr to some other register, and clear
181  allocptr's tag.  Don't expect/allow any other instructions in
182  this environment.
183*/
184void
185finish_allocating_uvector(ExceptionInformation *xp)
186{
187  pc program_counter = xpPC(xp);
188  opcode instr;
189  LispObj cur_allocptr = xpGPR(xp, allocptr);
190  int target_reg;
191
192  while (1) {
193    instr = *program_counter++;
194    if (IS_CLR_ALLOCPTR_TAG(instr)) {
195      xpGPR(xp, allocptr) = untag(cur_allocptr);
196      xpPC(xp) = program_counter;
197      return;
198    }
199    if (IS_SET_ALLOCPTR_HEADER_RD(instr)) {
[14366]200      header_of(cur_allocptr) = xpGPR(xp,RD_field(instr));
[14119]201    } else if (IS_SET_ALLOCPTR_RESULT_RD(instr)) {
202      xpGPR(xp,RD_field(instr)) = cur_allocptr;
203    } else {
204      Bug(xp, "Unexpected instruction following alloc trap at " LISP ":",program_counter);
205    }
206  }
207}
208
209
/* Try to satisfy an allocation of bytes_needed bytes on behalf of the
   thread whose context is xp.  On success, allocptr is advanced by
   disp_from_allocptr and true is returned; *crossed_threshold is set
   by new_heap_segment() if the notification threshold was crossed.
   Returns false only when even a full GC plus heap growth can't make
   room. */
Boolean
allocate_object(ExceptionInformation *xp,
                natural bytes_needed, 
                signed_natural disp_from_allocptr,
                TCR *tcr,
                Boolean *crossed_threshold)
{
  area *a = active_dynamic_area;

  /* Maybe do an EGC */
  if (a->older && lisp_global(OLDEST_EPHEMERAL)) {
    if (((a->active)-(a->low)) >= a->threshold) {
      gc_from_xp(xp, 0L);
    }
  }

  /* Life is pretty simple if we can simply grab a segment
     without extending the heap.
  */
  if (new_heap_segment(xp, bytes_needed, false, tcr, crossed_threshold)) {
    xpGPR(xp, allocptr) += disp_from_allocptr;
    return true;
  }
 
  /* It doesn't make sense to try a full GC if the object
     we're trying to allocate is larger than everything
     allocated so far.
  */
  if ((lisp_global(HEAP_END)-lisp_global(HEAP_START)) > bytes_needed) {
    untenure_from_area(tenured_area); /* force a full GC */
    gc_from_xp(xp, 0L);
    did_gc_notification_since_last_full_gc = false;
  }
 
  /* Try again, growing the heap if necessary */
  if (new_heap_segment(xp, bytes_needed, true, tcr, NULL)) {
    xpGPR(xp, allocptr) += disp_from_allocptr;
    return true;
  }
 
  return false;
}
252
253#ifndef XNOMEM
254#define XNOMEM 10
255#endif
256
257void
258update_bytes_allocated(TCR* tcr, void *cur_allocptr)
259{
260  BytePtr
261    last = (BytePtr) tcr->last_allocptr, 
262    current = (BytePtr) cur_allocptr;
263  if (last && (cur_allocptr != ((void *)VOID_ALLOCPTR))) {
264    tcr->bytes_allocated += last-current;
265  }
266  tcr->last_allocptr = 0;
267}
268
269void
270lisp_allocation_failure(ExceptionInformation *xp, TCR *tcr, natural bytes_needed)
271{
272  /* Couldn't allocate the object.  If it's smaller than some arbitrary
273     size (say 128K bytes), signal a "chronically out-of-memory" condition;
274     else signal a "allocation request failed" condition.
275  */
276  xpGPR(xp,allocptr) = xpGPR(xp,allocbase) = VOID_ALLOCPTR;
277  handle_error(xp, bytes_needed < (128<<10) ? XNOMEM : error_alloc_failed,0, NULL);
278}
279
[16066]280void
281callback_for_gc_notification(ExceptionInformation *xp, TCR *tcr)
282{
283  LispObj cmain = nrs_CMAIN.vcell;
284
285  did_gc_notification_since_last_full_gc = true;
286  if ((fulltag_of(cmain) == fulltag_misc) &&
287      (header_subtag(header_of(cmain)) == subtag_macptr)) {
288    callback_to_lisp(cmain,xp,SIGTRAP,0,NULL);
289  }
290}
291
[14119]292/*
293  Allocate a large list, where "large" means "large enough to
294  possibly trigger the EGC several times if this was done
295  by individually allocating each CONS."  The number of
296  ocnses in question is in arg_z; on successful return,
297  the list will be in arg_z
298*/
299
300Boolean
301allocate_list(ExceptionInformation *xp, TCR *tcr)
302{
303  natural
304    nconses = (unbox_fixnum(xpGPR(xp,arg_z))),
305    bytes_needed = (nconses << dnode_shift);
306  LispObj
307    prev = lisp_nil,
308    current,
309    initial = xpGPR(xp,arg_y);
[16066]310  Boolean notify_pending_gc = false;
[14119]311
312  if (nconses == 0) {
313    /* Silly case */
314    xpGPR(xp,arg_z) = lisp_nil;
315    return true;
316  }
317  update_bytes_allocated(tcr, (void *)(void *) tcr->save_allocptr);
[16066]318  if (allocate_object(xp,bytes_needed,(-bytes_needed)+fulltag_cons,tcr,&notify_pending_gc)) {
[14119]319    for (current = xpGPR(xp,allocptr);
320         nconses;
321         prev = current, current+= dnode_size, nconses--) {
322      deref(current,0) = prev;
323      deref(current,1) = initial;
324    }
325    xpGPR(xp,arg_z) = prev;
326    xpGPR(xp,arg_y) = xpGPR(xp,allocptr);
327    xpGPR(xp,allocptr)-=fulltag_cons;
[16066]328    if (notify_pending_gc && !did_gc_notification_since_last_full_gc) {
329      callback_for_gc_notification(xp,tcr);
330    }
[14119]331  } else {
332    lisp_allocation_failure(xp,tcr,bytes_needed);
333  }
334  return true;
335}
336
337Boolean
[16066]338handle_alloc_trap(ExceptionInformation *xp, TCR *tcr, Boolean *notify)
[14119]339{
340  pc program_counter;
341  natural cur_allocptr, bytes_needed = 0;
342  opcode prev_instr;
343  signed_natural disp = 0;
344  unsigned allocptr_tag;
345
[14807]346  if (!allocation_enabled) {
347    /* Back up before the alloc_trap, then let pc_luser_xp() back
348       up some more. */
349    xpPC(xp)-=1;
350    pc_luser_xp(xp,tcr, NULL);
351    allocation_enabled = true;
352    tcr->save_allocbase = (void *)VOID_ALLOCPTR;
353    handle_error(xp, error_allocation_disabled,0,NULL);
354    return true;
355  }
356
[14119]357  cur_allocptr = xpGPR(xp,allocptr);
358
359  allocptr_tag = fulltag_of(cur_allocptr);
360
361  switch (allocptr_tag) {
362  case fulltag_cons:
363    bytes_needed = sizeof(cons);
364    disp = -sizeof(cons) + fulltag_cons;
365    break;
366
367  case fulltag_misc:
368    disp = allocptr_displacement(xp);
369    bytes_needed = (-disp) + fulltag_misc;
370    break;
371
372    /* else fall thru */
373  default:
374    return false;
375  }
376
377  update_bytes_allocated(tcr,((BytePtr)(cur_allocptr-disp)));
[16066]378  if (allocate_object(xp, bytes_needed, disp, tcr, notify)) {
[14119]379    adjust_exception_pc(xp,4);
[16066]380    if (notify && *notify) {
381      pc_luser_xp(xp,tcr,NULL);
382      callback_for_gc_notification(xp,tcr);
383    }
[14119]384    return true;
385  }
386  lisp_allocation_failure(xp,tcr,bytes_needed);
387  return true;
388}
389
/* gc_deferred counts GC requests refused while GC was inhibited;
   full_gc_deferred flags that a full GC was among them. */
natural gc_deferred = 0, full_gc_deferred = 0;
391
/* GC-like worker passed to gc_like_from_xp() for
   GC_TRAP_FUNCTION_FLASH_FREEZE; the actual freezing bookkeeping is
   done by the caller, so this does nothing. */
signed_natural
flash_freeze(TCR *tcr, signed_natural param)
{
  return 0;
}
397
/* Dispatch a GC trap.  The selector (in imm0) names one of the
   GC_TRAP_FUNCTION_* operations; arg (in imm1) is its argument.
   Results are returned to lisp in imm0 or arg_z as appropriate.
   Always returns true (the trap is always considered handled) and
   advances the PC past the trap instruction. */
Boolean
handle_gc_trap(ExceptionInformation *xp, TCR *tcr)
{
  LispObj
    selector = xpGPR(xp,imm0), 
    arg = xpGPR(xp,imm1);
  area *a = active_dynamic_area;
  /* EGC is enabled iff the dynamic area has an older generation. */
  Boolean egc_was_enabled = (a->older != NULL);
  natural gc_previously_deferred = gc_deferred;


  switch (selector) {
  case GC_TRAP_FUNCTION_EGC_CONTROL:
    /* Enable/disable the EGC; return the previous state (T or NIL). */
    egc_control(arg != 0, a->active);
    xpGPR(xp,arg_z) = lisp_nil + (egc_was_enabled ? t_offset : 0);
    break;

  case GC_TRAP_FUNCTION_CONFIGURE_EGC:
    /* Set the per-generation thresholds from arg_x/arg_y/arg_z. */
    a->threshold = unbox_fixnum(xpGPR(xp, arg_x));
    g1_area->threshold = unbox_fixnum(xpGPR(xp, arg_y));
    g2_area->threshold = unbox_fixnum(xpGPR(xp, arg_z));
    xpGPR(xp,arg_z) = lisp_nil+t_offset;
    break;

  case GC_TRAP_FUNCTION_SET_LISP_HEAP_THRESHOLD:
    if (((signed_natural) arg) > 0) {
      /* Round the requested threshold up to a whole heap segment. */
      lisp_heap_gc_threshold = 
        align_to_power_of_2((arg-1) +
                            (heap_segment_size - 1),
                            log2_heap_segment_size);
    }
    /* fall through */
  case GC_TRAP_FUNCTION_GET_LISP_HEAP_THRESHOLD:
    xpGPR(xp, imm0) = lisp_heap_gc_threshold;
    break;

  case GC_TRAP_FUNCTION_USE_LISP_HEAP_THRESHOLD:
    /*  Try to put the current threshold in effect.  This may
        need to disable/reenable the EGC. */
    untenure_from_area(tenured_area);
    resize_dynamic_heap(a->active,lisp_heap_gc_threshold);
    if (egc_was_enabled) {
      if ((a->high - a->active) >= a->threshold) {
        tenure_to_area(tenured_area);
      }
    }
    xpGPR(xp, imm0) = lisp_heap_gc_threshold;
    break;

  case GC_TRAP_FUNCTION_SET_GC_NOTIFICATION_THRESHOLD:
    if ((signed_natural)arg >= 0) {
      lisp_heap_notify_threshold = arg;
      /* A new threshold re-arms notification. */
      did_gc_notification_since_last_full_gc = false;
    }
    /* fall through */

  case GC_TRAP_FUNCTION_GET_GC_NOTIFICATION_THRESHOLD:
    xpGPR(xp, imm0) = lisp_heap_notify_threshold;
    break;


  case GC_TRAP_FUNCTION_ENSURE_STATIC_CONSES:
    ensure_static_conses(xp,tcr,32768);
    break;

  case GC_TRAP_FUNCTION_FLASH_FREEZE:
    /* Make everything currently allocated "static": GC in place,
       then mark the whole (page-aligned) dynamic area as static
       dnodes of the tenured area. */
    untenure_from_area(tenured_area);
    gc_like_from_xp(xp,flash_freeze,0);
    a->active = (BytePtr) align_to_power_of_2(a->active, log2_page_size);
    tenured_area->static_dnodes = area_dnode(a->active, a->low);
    if (egc_was_enabled) {
      tenure_to_area(tenured_area);
    }
    xpGPR(xp, imm0) = tenured_area->static_dnodes << dnode_shift;
    break;

  case GC_TRAP_FUNCTION_ALLOCATION_CONTROL:
    switch(arg) {
    case 0: /* disable if allocation enabled */
      xpGPR(xp, arg_z) = lisp_nil;
      if (allocation_enabled) {
        TCR *other_tcr;
        ExceptionInformation *other_context;
        /* All threads must be stopped and normalized so no one is
           in the middle of an allocation sequence. */
        suspend_other_threads(true);
        normalize_tcr(xp,tcr,false);
        for (other_tcr=tcr->next; other_tcr != tcr; other_tcr = other_tcr->next) {
          other_context = other_tcr->pending_exception_context;
          if (other_context == NULL) {
            other_context = other_tcr->suspend_context;
          }
          normalize_tcr(other_context, other_tcr, true);
        }
        allocation_enabled = false;
        xpGPR(xp, arg_z) = t_value;
        resume_other_threads(true);
      }
      break;

    case 1:                     /* enable if disabled */
      xpGPR(xp, arg_z) = lisp_nil;
      if (!allocation_enabled) {
        allocation_enabled = true;
        xpGPR(xp, arg_z) = t_value;
      }
      break;

    default:                    /* query: T if allocation enabled */
      xpGPR(xp, arg_z) = lisp_nil;
      if (allocation_enabled) {
        xpGPR(xp, arg_z) = t_value;
      }
      break;
    }
    break;

       
  default:
    /* Some flavor of GC request (possibly combined with purify,
       impurify, or save-application bits). */
    update_bytes_allocated(tcr, (void *) ptr_from_lispobj(xpGPR(xp, allocptr)));

    if (selector == GC_TRAP_FUNCTION_IMMEDIATE_GC) {
      if (!full_gc_deferred) {
        gc_from_xp(xp, 0L);
        break;
      }
      /* Tried to do a full GC when gc was disabled.  That failed,
         so try full GC now */
      selector = GC_TRAP_FUNCTION_GC;
    }
   
    if (egc_was_enabled) {
      egc_control(false, (BytePtr) a->active);
    }
    gc_from_xp(xp, 0L);
    if (gc_deferred > gc_previously_deferred) {
      full_gc_deferred = 1;
    } else {
      full_gc_deferred = 0;
    }
    if (selector > GC_TRAP_FUNCTION_GC) {
      if (selector & GC_TRAP_FUNCTION_IMPURIFY) {
        impurify_from_xp(xp, 0L);
        /*        nrs_GC_EVENT_STATUS_BITS.vcell |= gc_integrity_check_bit; */
        lisp_global(OLDSPACE_DNODE_COUNT) = 0;
        gc_from_xp(xp, 0L);
      }
      if (selector & GC_TRAP_FUNCTION_PURIFY) {
        purify_from_xp(xp, 0L);
        lisp_global(OLDSPACE_DNODE_COUNT) = 0;
        gc_from_xp(xp, 0L);
      }
      if (selector & GC_TRAP_FUNCTION_SAVE_APPLICATION) {
        OSErr err;
        extern OSErr save_application(unsigned, Boolean);
        TCR *tcr = get_tcr(true);
        area *vsarea = tcr->vs_area;
       
        /* The toplevel function is the last thing pushed on the vstack. */
        nrs_TOPLFUNC.vcell = *((LispObj *)(vsarea->high)-1);
        err = save_application(arg, egc_was_enabled);
        if (err == noErr) {
          _exit(0);
        }
        fatal_oserr(": save_application", err);
      }
      switch (selector) {


      case GC_TRAP_FUNCTION_FREEZE:
        a->active = (BytePtr) align_to_power_of_2(a->active, log2_page_size);
        tenured_area->static_dnodes = area_dnode(a->active, a->low);
        xpGPR(xp, imm0) = tenured_area->static_dnodes << dnode_shift;
        break;
      default:
        break;
      }
    }
   
    if (egc_was_enabled) {
      egc_control(true, NULL);
    }
    break;
   
  }

  adjust_exception_pc(xp,4);
  return true;
}
584
585
586
/* Signal an error_stack_overflow condition to lisp; reg identifies
   the stack register (Rsp or vsp) whose stack overflowed. */
void
signal_stack_soft_overflow(ExceptionInformation *xp, unsigned reg)
{
  /* The cstack just overflowed.  Force the current thread's
     control stack to do so until all stacks are well under their overflow
     limits.
  */

#if 0
  lisp_global(CS_OVERFLOW_LIMIT) = CS_OVERFLOW_FORCE_LIMIT; /* force unsigned traps to fail */
#endif
  handle_error(xp, error_stack_overflow, reg, NULL);
}
600
601/*
602  Lower (move toward 0) the "end" of the soft protected area associated
603  with a by a page, if we can.
604*/
605
606void
607adjust_soft_protection_limit(area *a)
608{
609  char *proposed_new_soft_limit = a->softlimit - 4096;
610  protected_area_ptr p = a->softprot;
611 
612  if (proposed_new_soft_limit >= (p->start+16384)) {
613    p->end = proposed_new_soft_limit;
614    p->protsize = p->end-p->start;
615    a->softlimit = proposed_new_soft_limit;
616  }
617  protect_area(p);
618}
619
620void
621restore_soft_stack_limit(unsigned stkreg)
622{
623  area *a;
624  TCR *tcr = get_tcr(true);
625
626  switch (stkreg) {
627  case Rsp:
628    a = tcr->cs_area;
629    if ((a->softlimit - 4096) > (a->hardlimit + 16384)) {
630      a->softlimit -= 4096;
631    }
632    tcr->cs_limit = (LispObj)ptr_to_lispobj(a->softlimit);
633    break;
634  case vsp:
635    a = tcr->vs_area;
636    adjust_soft_protection_limit(a);
637    break;
638  }
639}
640
/* Maybe this'll work someday.  We may have to do something to
   make the thread look like it's not handling an exception */
/* Currently a no-op: the body is compiled out with #if 0. */
void
reset_lisp_process(ExceptionInformation *xp)
{
#if 0
  TCR *tcr = get_tcr(true);
  catch_frame *last_catch = (catch_frame *) ptr_from_lispobj(untag(tcr->catch_top));

  tcr->save_allocptr = (void *) ptr_from_lispobj(xpGPR(xp, allocptr));

  tcr->save_vsp = (LispObj *) ptr_from_lispobj(((lisp_frame *)ptr_from_lispobj(last_catch->csp))->savevsp);

  start_lisp(tcr, 1);
#endif
}
657
658
[15842]659
660void
661platform_new_heap_segment(ExceptionInformation *xp, TCR *tcr, BytePtr low, BytePtr high)
[14119]662{
[15842]663  tcr->last_allocptr = (void *)high;
664  xpGPR(xp,allocptr) = (LispObj) high;
665  tcr->save_allocbase = (void*) low;
[14119]666}
667 
668void
669update_area_active (area **aptr, BytePtr value)
670{
671  area *a = *aptr;
672  for (; a; a = a->older) {
673    if ((a->low <= value) && (a->high >= value)) break;
674  };
675  if (a == NULL) Bug(NULL, "Can't find active area");
676  a->active = value;
677  *aptr = a;
678
679  for (a = a->younger; a; a = a->younger) {
680    a->active = a->high;
681  }
682}
683
684LispObj *
685tcr_frame_ptr(TCR *tcr)
686{
687  ExceptionInformation *xp;
688  LispObj *bp = NULL;
689
690  if (tcr->pending_exception_context)
691    xp = tcr->pending_exception_context;
692  else {
693    xp = tcr->suspend_context;
694  }
695  if (xp) {
696    bp = (LispObj *) xpGPR(xp, Rsp);
697  }
698  return bp;
699}
700
/* Bring a (suspended or exception-handling) thread's TCR into a state
   the GC can walk: update the active pointers of its control and
   value stack areas, account for bytes it has allocated, and reset
   its allocation pointers to VOID_ALLOCPTR.  xp is the thread's
   context (NULL if it's in a foreign-function call); is_other_tcr is
   true when tcr is not the calling thread's own TCR. */
void
normalize_tcr(ExceptionInformation *xp, TCR *tcr, Boolean is_other_tcr)

{
  void *cur_allocptr = NULL;
  LispObj freeptr = 0;

  if (xp) {
    if (is_other_tcr) {
      /* Finish any interrupted allocation/store sequence first. */
      pc_luser_xp(xp, tcr, NULL);
      freeptr = xpGPR(xp, allocptr);
      if (fulltag_of(freeptr) == 0){
        cur_allocptr = (void *) ptr_from_lispobj(freeptr);
      }
    }
    update_area_active((area **)&tcr->cs_area, (BytePtr) ptr_from_lispobj(xpGPR(xp, Rsp)));
    update_area_active((area **)&tcr->vs_area, (BytePtr) ptr_from_lispobj(xpGPR(xp, vsp)));
  } else {
    /* In ff-call. */
    cur_allocptr = (void *) (tcr->save_allocptr);
    update_area_active((area **)&tcr->vs_area, (BytePtr) tcr->save_vsp);
    update_area_active((area **)&tcr->cs_area, (BytePtr) tcr->last_lisp_frame);
  }


  tcr->save_allocptr = tcr->save_allocbase = (void *)VOID_ALLOCPTR;
  if (cur_allocptr) {
    update_bytes_allocated(tcr, cur_allocptr);
    if (freeptr) {
      xpGPR(xp, allocptr) = VOID_ALLOCPTR;
    }
  }
}
734
/* TCR of the thread currently running a GC-like operation; NULL
   when no such operation is in progress. */
TCR *gc_tcr = NULL;
736
/* Suspend and "normalize" other tcrs, then call a gc-like function
   in that context.  Resume the other tcrs, then return what the
   function returned */

signed_natural
gc_like_from_xp(ExceptionInformation *xp, 
                signed_natural(*fun)(TCR *, signed_natural), 
                signed_natural param)
{

  TCR *tcr = get_tcr(true), *other_tcr;
  int result;
  signed_natural inhibit;

  suspend_other_threads(true);
  inhibit = (signed_natural)(lisp_global(GC_INHIBIT_COUNT));
  if (inhibit != 0) {
    /* GC is inhibited: remember that a GC was wanted (negating the
       count marks it pending) and bail out. */
    if (inhibit > 0) {
      lisp_global(GC_INHIBIT_COUNT) = (LispObj)(-inhibit);
    }
    resume_other_threads(true);
    gc_deferred++;
    return 0;
  }
  gc_deferred = 0;

  gc_tcr = tcr;

  xpGPR(xp, allocptr) = VOID_ALLOCPTR;

  normalize_tcr(xp, tcr, false);


  /* Pick the context the GC should use for each other thread and
     normalize its TCR through it. */
  for (other_tcr = tcr->next; other_tcr != tcr; other_tcr = other_tcr->next) {
    if (other_tcr->pending_exception_context) {
      other_tcr->gc_context = other_tcr->pending_exception_context;
    } else if (other_tcr->valence == TCR_STATE_LISP) {
      other_tcr->gc_context = other_tcr->suspend_context;
    } else {
      /* no pending exception, didn't suspend in lisp state:
         must have executed a synchronous ff-call.
      */
      other_tcr->gc_context = NULL;
    }
    normalize_tcr(other_tcr->gc_context, other_tcr, true);
  }
   


  result = fun(tcr, param);

  /* Clear the per-thread gc_context fields again. */
  other_tcr = tcr;
  do {
    other_tcr->gc_context = NULL;
    other_tcr = other_tcr->next;
  } while (other_tcr != tcr);

  gc_tcr = NULL;

  resume_other_threads(true);

  return result;

}
801
802
803
804/* Returns #bytes freed by invoking GC */
805
806signed_natural
807gc_from_tcr(TCR *tcr, signed_natural param)
808{
809  area *a;
810  BytePtr oldfree, newfree;
811  BytePtr oldend, newend;
812
813  a = active_dynamic_area;
814  oldend = a->high;
815  oldfree = a->active;
816  gc(tcr, param);
817  newfree = a->active;
818  newend = a->high;
819#if 0
820  fprintf(dbgout, "End GC  in 0x%lx\n", tcr);
821#endif
822  return ((oldfree-newfree)+(newend-oldend));
823}
824
825signed_natural
826gc_from_xp(ExceptionInformation *xp, signed_natural param)
827{
828  signed_natural status = gc_like_from_xp(xp, gc_from_tcr, param);
829
830  freeGCptrs();
831  return status;
832}
833
/* Run purify as a GC-like operation in the context xp. */
signed_natural
purify_from_xp(ExceptionInformation *xp, signed_natural param)
{
  return gc_like_from_xp(xp, purify, param);
}
839
/* Run impurify as a GC-like operation in the context xp. */
signed_natural
impurify_from_xp(ExceptionInformation *xp, signed_natural param)
{
  return gc_like_from_xp(xp, impurify, param);
}
845
846
847
848
849
850
/* Handlers for faults on protected areas, indexed by the area's
   "why" field (see handle_protection_violation, which dispatches
   via protection_handlers[area->why]). */
protection_handler
 * protection_handlers[] = {
   do_spurious_wp_fault,
   do_soft_stack_overflow,
   do_soft_stack_overflow,
   do_soft_stack_overflow,
   do_hard_stack_overflow,   
   do_hard_stack_overflow,
   do_hard_stack_overflow
   };
861
862
/* True if the faulting access was a write.  Tests bit 11 of the
   context's fault status register (on ARM this is presumably the
   DFSR WnR "write, not read" bit — confirm against the ARM ARM for
   the targeted architecture revision). */
Boolean
is_write_fault(ExceptionInformation *xp, siginfo_t *info)
{
  return ((xpFaultStatus(xp) & 0x800) != 0);
}
868
/* Handle SIGSEGV/SIGBUS.  Recognizes, in order: a fault at the
   thread's registered "safe reference" address, a fault raised while
   probing memory via touch_page(), a write fault on one of our
   protected areas (dispatched through protection_handlers) or on the
   pure/readonly area (unprotected on demand), and finally — if the
   thread was running lisp — a callback to lisp to report the fault.
   Returns true iff the fault was handled. */
Boolean
handle_protection_violation(ExceptionInformation *xp, siginfo_t *info, TCR *tcr, int old_valence)
{
  BytePtr addr;
  protected_area_ptr area;
  protection_handler *handler;
  extern Boolean touch_page(void *);
  extern void touch_page_end(void);

#ifdef LINUX
  addr = (BytePtr) ((natural) (xpFaultAddress(xp)));
#else
  if (info) {
    addr = (BytePtr)(info->si_addr);
  } else {
    addr = (BytePtr) ((natural) (xpFaultAddress(xp)));
  }
#endif

  if (addr && (addr == tcr->safe_ref_address)) {
    /* %safe-reference faulted: return 0 in imm0 and skip the access. */
    adjust_exception_pc(xp,4);

    xpGPR(xp,imm0) = 0;
    return true;
  }

  if (xpPC(xp) == (pc)touch_page) {
    /* touch_page() probe failed: report failure and resume at its end. */
    xpGPR(xp,imm0) = 0;
    xpPC(xp) = (pc)touch_page_end;
    return true;
  }


  if (is_write_fault(xp,info)) {
    area = find_protected_area(addr);
    if (area != NULL) {
      handler = protection_handlers[area->why];
      return handler(xp, area, addr);
    } else {
      if ((addr >= readonly_area->low) &&
          (addr < readonly_area->active)) {
        /* Writes into the readonly area unprotect its page. */
        UnProtectMemory((LogicalAddress)(truncate_to_power_of_2(addr,log2_page_size)),
                        page_size);
        return true;
      }
    }
  }
  if (old_valence == TCR_STATE_LISP) {
    LispObj cmain = nrs_CMAIN.vcell;
   
    if ((fulltag_of(cmain) == fulltag_misc) &&
      (header_subtag(header_of(cmain)) == subtag_macptr)) {
     
      callback_for_trap(nrs_CMAIN.vcell, xp, is_write_fault(xp,info)?SIGBUS:SIGSEGV, (natural)addr, NULL);
    }
  }
  return false;
}
927
928
929
930
931
/* Handler for a fault on a hard stack guard page: the stack is
   unrecoverably blown, so (attempt to) reset the lisp process.
   Returns -1 (not handled) since reset_lisp_process is currently
   a no-op. */
OSStatus
do_hard_stack_overflow(ExceptionInformation *xp, protected_area_ptr area, BytePtr addr)
{
#ifdef SUPPORT_PRAGMA_UNUSED
#pragma unused(area,addr)
#endif
  reset_lisp_process(xp);
  return -1;
}
941
942extern area*
943allocate_vstack(natural useable);       /* This is in "pmcl-kernel.c" */
944
945extern area*
946allocate_tstack(natural useable);       /* This is in "pmcl-kernel.c" */
947
948
949
950
951
952
/* True if the stack frame at spPtr carries the lisp frame marker. */
Boolean
lisp_frame_p(lisp_frame *spPtr)
{
  return (spPtr->marker == lisp_frame_marker);
}
958
959
/* Counter (not updated in this file) for stack overflows during ff-calls. */
int ffcall_overflow_count = 0;
961
962
963
964
965
966
967/* Note: CURRENT_VS (CURRENT_TS) is always either the area containing
968  the current value of VSP (TSP) or an older area.  */
969
970OSStatus
971do_vsp_overflow (ExceptionInformation *xp, BytePtr addr)
972{
973  TCR* tcr = get_tcr(true);
974  area *a = tcr->vs_area;
975  protected_area_ptr vsp_soft = a->softprot;
976  unprotect_area(vsp_soft);
977  signal_stack_soft_overflow(xp,vsp);
978  return 0;
979}
980
981
982
983OSStatus
984do_soft_stack_overflow(ExceptionInformation *xp, protected_area_ptr prot_area, BytePtr addr)
985{
986  /* Trying to write into a guard page on the vstack or tstack.
987     Allocate a new stack segment, emulate stwu and stwux for the TSP, and
988     signal an error_stack_overflow condition.
989      */
990  if (prot_area->why == kVSPsoftguard) {
991    return do_vsp_overflow(xp,addr);
992  }
993  unprotect_area(prot_area);
994  signal_stack_soft_overflow(xp,Rsp);
995  return 0;
996}
997
/* Handler for write faults on areas protected for no current reason;
   always reports "not handled". */
OSStatus
do_spurious_wp_fault(ExceptionInformation *xp, protected_area_ptr area, BytePtr addr)
{
#ifdef SUPPORT_PRAGMA_UNUSED
#pragma unused(xp,area,addr)
#endif
  return -1;
}
1006
1007
1008
1009
1010     
1011
1012
1013
1014
1015
/* SIGFPE handler stub: FP exceptions aren't handled here on ARM, so
   report "not handled" and let handle_exception's caller decide. */
Boolean
handle_sigfpe(ExceptionInformation *xp, TCR *tcr)
{
  return false;
}
1021
1022
/* Hook for emulating instructions the hardware doesn't implement;
   currently nothing is emulated, so report "not handled". */
Boolean
handle_unimplemented_instruction(ExceptionInformation *xp,
                                 opcode instruction,
                                 TCR *tcr)
{

  return false;
}
1031
1032Boolean
1033handle_exception(int xnum, 
1034                 ExceptionInformation *xp, 
1035                 TCR *tcr, 
1036                 siginfo_t *info,
1037                 int old_valence)
1038{
1039  pc program_counter;
1040  opcode instruction = 0;
1041
1042  if (old_valence != TCR_STATE_LISP) {
1043    return false;
1044  }
1045
1046  program_counter = xpPC(xp);
1047 
1048  if ((xnum == SIGILL) | (xnum == SIGTRAP)) {
1049    instruction = *program_counter;
1050  }
1051
1052  if (IS_ALLOC_TRAP(instruction)) {
[16066]1053    Boolean did_notify = false,
1054      *notify_ptr = &did_notify;
1055    if (did_gc_notification_since_last_full_gc) {
1056      notify_ptr = NULL;
1057    }
1058    return handle_alloc_trap(xp, tcr, notify_ptr);
[14119]1059  } else if ((xnum == SIGSEGV) ||
1060             (xnum == SIGBUS)) {
1061    return handle_protection_violation(xp, info, tcr, old_valence);
1062  } else if (xnum == SIGFPE) {
1063    return handle_sigfpe(xp, tcr);
1064  } else if ((xnum == SIGILL)) {
1065    if (IS_GC_TRAP(instruction)) {
1066      return handle_gc_trap(xp, tcr);
1067    } else if (IS_UUO(instruction)) {
1068      return handle_uuo(xp, info, instruction);
1069    } else {
1070      return handle_unimplemented_instruction(xp,instruction,tcr);
1071    }
1072  } else if (xnum == SIGNAL_FOR_PROCESS_INTERRUPT) {
1073    tcr->interrupt_pending = 0;
1074    callback_for_trap(nrs_CMAIN.vcell, xp, 0, 0, NULL);
1075    return true;
1076  }
1077
1078  return false;
1079}
1080
/* Advance the context's PC by delta BYTES.  xpPC is a pointer to
   4-byte opcodes, so pointer arithmetic needs delta expressed in
   words — hence the shift by 2. */
void
adjust_exception_pc(ExceptionInformation *xp, int delta)
{
  xpPC(xp) += (delta >> 2);
}
1086
1087
1088/*
1089  This wants to scan backwards until "where" points to an instruction
1090   whose major opcode is either 63 (double-float) or 59 (single-float)
1091*/
1092
1093OSStatus
1094handle_fpux_binop(ExceptionInformation *xp, pc where)
1095{
1096  OSStatus err = -1;
1097  opcode *there = (opcode *) where, instr, errnum = 0;
1098  return err;
1099}
1100
/*
 * Decode and dispatch a UUO -- an architecturally-undefined
 * instruction that the lisp compiler/kernel uses as a trap.
 * "the_uuo" is the trapping instruction word itself; its format field
 * selects the handling below.  Returns true when the UUO was handled;
 * in that case the saved PC is advanced past the UUO by "bump" bytes
 * (unless a case already adjusted the PC itself and zeroed bump).
 */
Boolean
handle_uuo(ExceptionInformation *xp, siginfo_t *info, opcode the_uuo) 
{
  unsigned 
    format = UUO_FORMAT(the_uuo);
  Boolean handled = false;
  int bump = 4;                 /* bytes to skip: one instruction by default */
  TCR *tcr = get_tcr(true);

  switch (format) {
  case uuo_format_kernel_service:
    {
      /* Thread-control services: arg_z holds the target thread's TCR;
         results (where there is one) are returned in imm0. */
      TCR * target = (TCR *)xpGPR(xp,arg_z);
      int service = UUO_UNARY_field(the_uuo);

      switch (service) {
      case error_propagate_suspend:
        handled = true;
        break;
      case error_interrupt:
        xpGPR(xp,imm0) = (LispObj) raise_thread_interrupt(target);
        handled = true;
        break;
      case error_suspend:
        xpGPR(xp,imm0) = (LispObj) lisp_suspend_tcr(target);
        handled = true;
        break;
      case error_suspend_all:
        lisp_suspend_other_threads();
        handled = true;
        break;
      case error_resume:
        xpGPR(xp,imm0) = (LispObj) lisp_resume_tcr(target);
        handled = true;
        break;
      case error_resume_all:
        lisp_resume_other_threads();
        handled = true;
        break;
      case error_kill:
        xpGPR(xp,imm0) = (LispObj)kill_tcr(target);
        handled = true;
        break;
      case error_allocate_list:
        allocate_list(xp,tcr);
        handled = true;
        break;
      default:
        handled = false;
        break;
      }
      break;
    }

  case uuo_format_unary:
    switch(UUO_UNARY_field(the_uuo)) {
    case 3:
      /* Thread-local-binding index out of bounds: grow the TLB.  If
         that fails, fall into the unhandled case. */
      if (extend_tcr_tlb(tcr,xp,UUOA_field(the_uuo))) {
        handled = true;
        bump = 4;
        break;
      }
      /* fall in */
    default:
      handled = false;
      break;

    }
    break;

  case uuo_format_nullary:
    switch (UUOA_field(the_uuo)) {
    case 3:
      /* Breakpoint: step past the UUO first so the debugger shows the
         instruction after it, and suppress the trailing bump. */
      adjust_exception_pc(xp, bump);
      bump = 0;
      lisp_Debugger(xp, info, debug_entry_dbg, false, "Lisp Breakpoint");
      handled = true;
      break;

    case 4:
      /* Deferred process-interrupt: clear the pending flag and call
         back into lisp via CMAIN. */
      tcr->interrupt_pending = 0;
      callback_for_trap(nrs_CMAIN.vcell, xp, 0, 0, NULL);
      handled = true;
      break;
    default:
      handled = false;
      break;
    }
    break;


  /* All error-signalling formats funnel through the lisp error
     dispatcher; the callback decides the PC adjustment via bump. */
  case uuo_format_error_lisptag:
  case uuo_format_error_fulltag:
  case uuo_format_error_xtype:
  case uuo_format_cerror_lisptag:
  case uuo_format_cerror_fulltag:
  case uuo_format_cerror_xtype:
  case uuo_format_nullary_error:
  case uuo_format_unary_error:
  case uuo_format_binary_error:
  case uuo_format_ternary:
  case uuo_format_ternary2:
    handled = handle_error(xp,0,the_uuo, &bump);
    break;

  default:
    handled = false;
    bump = 0;
  }
 
  if (handled && bump) {
    adjust_exception_pc(xp, bump);
  }
  return handled;
}
1216
/*
 * If "lisp_function" is a lisp function object whose code vector
 * contains the address "where", return that (tagged) code vector;
 * otherwise return 0.
 */
natural
register_codevector_contains_pc (natural lisp_function, pc where)
{
  natural code_vector, size;

  if ((fulltag_of(lisp_function) == fulltag_misc) &&
      (header_subtag(header_of(lisp_function)) == subtag_function)) {
    code_vector = deref(lisp_function, 2);  /* slot 2: presumably the code vector -- confirm against ARM function layout */
    size = header_element_count(header_of(code_vector)) << 2;  /* element count -> bytes */
    /* NOTE(review): the lower bound compares against untag(code_vector)
       but the upper bound uses the tagged value plus size -- confirm the
       asymmetry is intentional. */
    if ((untag(code_vector) < (natural)where) && 
        ((natural)where < (code_vector + size)))
      return(code_vector);
  }

  return(0);
}
1233
1234Boolean
1235callback_for_trap (LispObj callback_macptr, ExceptionInformation *xp, natural info,natural arg, int *bumpP)
1236{
1237  return callback_to_lisp(callback_macptr, xp, info,arg, bumpP);
1238}
1239
/*
 * Transfer control to a lisp callback (the code addressed by the
 * macptr "callback_macptr"), passing the exception context, two
 * arguments, and an indication of where in lisp code the exception
 * occurred: the register holding the containing function (Rfn or nfn,
 * or 0 if neither matches) and a byte offset into its code vector.
 * The global exception lock is released around the call so lisp can
 * run; the callback's integer return value (a PC adjustment) is
 * stored through bumpP when bumpP is non-NULL.  Always returns true.
 */
Boolean
callback_to_lisp (LispObj callback_macptr, ExceptionInformation *xp,
                  natural arg1, natural arg2, int *bumpP)
{
  natural  callback_ptr;
  area *a;
  natural fnreg = Rfn,  codevector, offset;
  pc where = xpPC(xp);
  int delta;

  /* Attribute the PC to the function in Rfn, else nfn; if neither
     contains it, report fnreg = 0 and pass the raw PC as the offset. */
  codevector = register_codevector_contains_pc(xpGPR(xp,fnreg), where);
  if (codevector == 0) {
    fnreg = nfn;
    codevector = register_codevector_contains_pc(xpGPR(xp,fnreg), where);
    if (codevector == 0) {
      fnreg = 0;
    }
  }
  if (codevector) {
    offset = (natural)where - (codevector - (fulltag_misc-node_size));
  } else {
    offset = (natural)where;
  }

  TCR *tcr = get_tcr(true);

  /* Put the active stack pointer where .SPcallback expects it */
  a = tcr->cs_area;
  a->active = (BytePtr) ptr_from_lispobj(xpGPR(xp, Rsp));

  /* Copy globals from the exception frame to tcr */
  tcr->save_allocptr = (void *)ptr_from_lispobj(xpGPR(xp, allocptr));
  tcr->save_vsp = (LispObj*) ptr_from_lispobj(xpGPR(xp, vsp));

  /* Call back.
     Lisp will handle trampolining through some code that
     will push lr/fn & pc/nfn stack frames for backtrace.
  */
  callback_ptr = ((macptr *)ptr_from_lispobj(untag(callback_macptr)))->address;
  UNLOCK(lisp_global(EXCEPTION_LOCK), tcr);
  delta = ((int (*)())callback_ptr) (xp, arg1, arg2, fnreg, offset);
  LOCK(lisp_global(EXCEPTION_LOCK), tcr);

  if (bumpP) {
    *bumpP = delta;
  }

  /* Copy GC registers back into exception frame */
  xpGPR(xp, allocptr) = (LispObj) ptr_to_lispobj(tcr->save_allocptr);
  return true;
}
1295
1296area *
1297allocate_no_stack (natural size)
1298{
1299#ifdef SUPPORT_PRAGMA_UNUSED
1300#pragma unused(size)
1301#endif
1302
1303  return (area *) NULL;
1304}
1305
1306
1307
1308
1309
1310
1311/* callback to (symbol-value cmain) if it is a macptr,
1312   otherwise report cause and function name to console.
1313   Returns noErr if exception handled OK */
1314OSStatus
1315handle_trap(ExceptionInformation *xp, opcode the_trap, pc where, siginfo_t *info)
1316{
1317  LispObj   cmain = nrs_CMAIN.vcell;
1318  TCR *tcr = TCR_FROM_TSD(xpGPR(xp, rcontext));
1319
1320}
1321
1322
1323
1324
1325void non_fatal_error( char *msg )
1326{
1327  fprintf( dbgout, "Non-fatal error: %s.\n", msg );
1328  fflush( dbgout );
1329}
1330
1331
1332
1333Boolean
1334handle_error(ExceptionInformation *xp, unsigned arg1, unsigned arg2, int *bumpP)
1335{
1336  LispObj   errdisp = nrs_ERRDISP.vcell;
1337
1338  if ((fulltag_of(errdisp) == fulltag_misc) &&
1339      (header_subtag(header_of(errdisp)) == subtag_macptr)) {
1340    /* errdisp is a macptr, we can call back to lisp */
1341    return callback_for_trap(errdisp, xp, arg1, arg2, bumpP);
1342    }
1343
1344  return false;
1345}
1346               
1347
/*
   Current thread has all signals masked.  Before unmasking them,
   make it appear that the current thread has been suspended.
   (This is to handle the case where another thread is trying
   to GC before this thread is able to seize the exception lock.)
   Returns the thread's previous valence so the caller can restore it.
*/
int
prepare_to_wait_for_exception_lock(TCR *tcr, ExceptionInformation *context)
{
  int old_valence = tcr->valence;

  /* Publish the context before flipping the valence, so any thread
     that observes EXCEPTION_WAIT also sees a valid context. */
  tcr->pending_exception_context = context;
  tcr->valence = TCR_STATE_EXCEPTION_WAIT;

  ALLOW_EXCEPTIONS(context);
  return old_valence;
} 
1365
/*
 * Acquire the global exception lock (blocking until any GC in
 * progress finishes), then push "xf" -- recording this exception
 * context -- onto the TCR's xframe list and mark the thread as
 * running foreign code.  Paired with
 * unlock_exception_lock_in_handler().
 */
void
wait_for_exception_lock_in_handler(TCR *tcr, 
                                   ExceptionInformation *context,
                                   xframe_list *xf)
{

  LOCK(lisp_global(EXCEPTION_LOCK), tcr);
  xf->curr = context;
  xf->prev = tcr->xframe;
  tcr->xframe =  xf;
  tcr->pending_exception_context = NULL;
  tcr->valence = TCR_STATE_FOREIGN; 
}
1379
/*
 * Undo wait_for_exception_lock_in_handler(): pop the most recent
 * xframe (restoring its context as the pending exception context),
 * mark the thread as returning from the exception, and release the
 * global exception lock.
 */
void
unlock_exception_lock_in_handler(TCR *tcr)
{
  tcr->pending_exception_context = tcr->xframe->curr;
  tcr->xframe = tcr->xframe->prev;
  tcr->valence = TCR_STATE_EXCEPTION_RETURN;
  UNLOCK(lisp_global(EXCEPTION_LOCK),tcr);
}
1388
1389/*
1390   If an interrupt is pending on exception exit, try to ensure
1391   that the thread sees it as soon as it's able to run.
1392*/
1393void
1394raise_pending_interrupt(TCR *tcr)
1395{
1396  if (TCR_INTERRUPT_LEVEL(tcr) > 0) {
1397    pthread_kill((pthread_t)ptr_from_lispobj(tcr->osid), SIGNAL_FOR_PROCESS_INTERRUPT);
1398  }
1399}
1400
/*
 * Leave a signal handler: block all signals while restoring the
 * TCR's pre-exception state (valence, pending context, last lisp
 * frame).  The full mask keeps this transition atomic with respect
 * to other signals; the pre-handler mask is presumably restored when
 * the handler actually returns -- confirm against the sigreturn path.
 */
void
exit_signal_handler(TCR *tcr, int old_valence, natural old_last_lisp_frame)
{
#ifndef ANDROID
  sigset_t mask;
  sigfillset(&mask);
#else
  /* Android: no usable sigfillset(); an all-ones bit vector stands in
     for a full sigset_t (see the rt_sigprocmask wrapper above). */
  int mask [] = {-1,-1};
#endif
 
  pthread_sigmask(SIG_SETMASK,(sigset_t *)&mask, NULL);
  tcr->valence = old_valence;
  tcr->pending_exception_context = NULL;
  tcr->last_lisp_frame = old_last_lisp_frame;
}
1416
1417
/*
 * Common synchronous-exception handler (SIGILL/SIGSEGV/SIGBUS paths).
 * On Darwin the Mach exception machinery has already computed the TCR,
 * old valence and old last_lisp_frame and passes them in; elsewhere
 * they are derived here.  Acquires the exception lock, dispatches to
 * handle_exception(), and drops into the kernel debugger if the
 * exception goes unhandled.
 */
void
signal_handler(int signum, siginfo_t *info, ExceptionInformation  *context
#ifdef DARWIN
, TCR *tcr, int old_valence, natural old_last_lisp_frame
#endif
)
{
  xframe_list xframe_link;
#ifndef DARWIN
   
    TCR *tcr = (TCR *) get_interrupt_tcr(false);
 
    /* The signal handler's entered with all signals (notably the
       thread_suspend signal) blocked.  Don't allow any other signals
       (notably the thread_suspend signal) to preempt us until we've
       set the TCR's xframe slot to include the current exception
       context.
    */
   
    natural  old_last_lisp_frame = tcr->last_lisp_frame;
    int old_valence;

    tcr->last_lisp_frame = xpGPR(context,Rsp);
    old_valence = prepare_to_wait_for_exception_lock(tcr, context);
#endif

  /* A suspend request arrived while signals were blocked: deliver it
     to ourselves now so it is serviced promptly. */
  if (tcr->flags & (1<<TCR_FLAG_BIT_PENDING_SUSPEND)) {
    CLR_TCR_FLAG(tcr, TCR_FLAG_BIT_PENDING_SUSPEND);
    pthread_kill(pthread_self(), thread_suspend_signal);
  }

 
  wait_for_exception_lock_in_handler(tcr, context, &xframe_link);
  if ((!handle_exception(signum, context, tcr, info, old_valence))) {
    char msg[512];
    snprintf(msg, sizeof(msg), "Unhandled exception %d at 0x%lx, context->regs at #x%lx", signum, xpPC(context), (natural)xpGPRvector(context));
    if (lisp_Debugger(context, info, signum, (old_valence != TCR_STATE_LISP), msg)) {
      SET_TCR_FLAG(tcr,TCR_FLAG_BIT_PROPAGATE_EXCEPTION);
    }
  }
  unlock_exception_lock_in_handler(tcr);

  /* This thread now looks like a thread that was suspended while
     executing lisp code.  If some other thread gets the exception
     lock and GCs, the context (this thread's suspend_context) will
     be updated.  (That's only of concern if it happens before we
     can return to the kernel/to the Mach exception handler).
  */
  exit_signal_handler(tcr, old_valence, old_last_lisp_frame);
  raise_pending_interrupt(tcr);
}
1469
[14517]1470
/*
 * SIGILL handler.  UUOs carry an ARM-style condition in their top
 * bits; a UUO whose condition is false against the saved PSR flags
 * must behave as a no-op, so we simply step past it.  UUOs whose
 * condition holds (and non-UUO illegal instructions) are passed on to
 * the generic signal_handler().
 */
void
sigill_handler(int signum, siginfo_t *info, ExceptionInformation  *xp)
{
  pc program_counter = xpPC(xp);
  opcode instr = *program_counter;

  if (IS_UUO(instr)) {
    natural psr = xpPSR(xp);
    Boolean opcode_matched_condition = false,
      flip = ((instr & (1<<28)) != 0);  /* bit 28 negates the base condition */
   

    switch (instr >> 29) {              /* 3-bit base condition field */
    case 0:                             /* Z set   (EQ)  */
      opcode_matched_condition = ((psr & PSR_Z_MASK) != 0);
      break;
    case 1:                             /* C set   (CS)  */
      opcode_matched_condition = ((psr & PSR_C_MASK) != 0);
      break;
    case 2:                             /* N set   (MI)  */
      opcode_matched_condition = ((psr & PSR_N_MASK) != 0);
      break;
    case 3:                             /* V set   (VS)  */
      opcode_matched_condition = ((psr & PSR_V_MASK) != 0);
      break;
    case 4:                             /* C set and Z clear (HI) */
      opcode_matched_condition = (((psr & PSR_C_MASK) != 0) &&
                                  ((psr & PSR_Z_MASK) == 0));
      break;
    case 5:                             /* N == V  (GE)  */
      opcode_matched_condition = (((psr & PSR_N_MASK) != 0) ==
                                  ((psr & PSR_V_MASK) != 0));
      break;
    case 6:                             /* N == V and Z clear (GT) */
      opcode_matched_condition = ((((psr & PSR_N_MASK) != 0) ==
                                   ((psr & PSR_V_MASK) != 0)) &&
                                  ((psr & PSR_Z_MASK) == 0));
      break;
    case 7:                             /* always; never negated */
      opcode_matched_condition = true;
      flip = false;
      break;
    }
    if (flip) {
      opcode_matched_condition = !opcode_matched_condition;
    }
    if (!opcode_matched_condition) {
      /* Condition false: the UUO is a no-op; skip it and resume. */
      adjust_exception_pc(xp,4);
      return;
    }
  }
  signal_handler(signum,info,xp);
}
1524
1525
[14119]1526#ifdef USE_SIGALTSTACK
/*
 * Re-invoke "handler" on the thread's main stack rather than the
 * sigaltstack: copy the ucontext and siginfo just below the
 * interrupted code's stack pointer, sanitize the copied context's
 * altstack/link fields, and tail into the assembly trampoline
 * call_handler_on_main_stack() which switches stacks and calls
 * handler(signo, info_copy, xp_copy) returning to return_address.
 */
void
invoke_handler_on_main_stack(int signo, siginfo_t *info, ExceptionInformation *xp, void *return_address, void *handler)
{
  ExceptionInformation *xp_copy;
  siginfo_t *info_copy;
  extern void call_handler_on_main_stack(int, siginfo_t *, ExceptionInformation *,void *, void *);
 
  BytePtr target_sp= (BytePtr)xpGPR(xp,Rsp);
  target_sp -= sizeof(ucontext_t);
  xp_copy = (ExceptionInformation *)target_sp;
  memmove(target_sp,xp,sizeof(*xp));
  /* The copy must not claim to be running on the alternate stack. */
  xp_copy->uc_stack.ss_sp = 0;
  xp_copy->uc_stack.ss_size = 0;
  xp_copy->uc_stack.ss_flags = 0;
  xp_copy->uc_link = NULL;
  target_sp -= sizeof(siginfo_t);
  info_copy = (siginfo_t *)target_sp;
  memmove(target_sp,info,sizeof(*info));
  call_handler_on_main_stack(signo, info_copy, xp_copy, return_address, handler);
}
1547 
/*
 * First-level SIGSEGV/SIGBUS handler, running on the sigaltstack.
 * Faults that are really control-stack growth are repaired here:
 * a SIGBUS exactly on the control stack's soft-limit page is fixed by
 * mapping that page in; a SIGSEGV between the stack's low end and the
 * soft limit either unprotects the soft-guard region (soft overflow)
 * or is a fatal hard overflow.  Anything else (and soft overflows,
 * after unprotection) is forwarded to signal_handler() on the main
 * stack.
 */
void
altstack_signal_handler(int signo, siginfo_t *info, ExceptionInformation *xp)
{
  TCR *tcr=get_tcr(true);
 
  if (signo == SIGBUS) {
    BytePtr addr = (BytePtr)(xp->uc_mcontext.fault_address); 
    area *a = tcr->cs_area;
    /* Fault on the soft-limit page itself: map it in and retry. */
    if (((BytePtr)truncate_to_power_of_2(addr,log2_page_size))== a->softlimit) 
{
      if (mmap(a->softlimit,
               page_size,
               PROT_READ|PROT_WRITE|PROT_EXEC,
               MAP_PRIVATE|MAP_ANON|MAP_FIXED,
               -1,
               0) == a->softlimit) {
        return;
      }
    }
  } else if (signo == SIGSEGV) {
    BytePtr addr = (BytePtr)(xp->uc_mcontext.fault_address);
    area *a = tcr->cs_area;
   
    if ((addr >= a->low) &&
        (addr < a->softlimit)) {
      if (addr < a->hardlimit) {
        Bug(xp, "hard stack overflow");
      } else {
        /* Soft overflow: open the guard pages so lisp can signal a
           storage condition, then fall through to signal_handler. */
        UnProtectMemory(a->hardlimit,a->softlimit-a->hardlimit);
      }
    }
  }
  invoke_handler_on_main_stack(signo, info, xp, __builtin_return_address(0), signal_handler);
}
1582#endif
1583
1584/*
1585  If it looks like we're in the middle of an atomic operation, make
1586  it seem as if that operation is either complete or hasn't started
1587  yet.
1588
1589  The cases handled include:
1590
1591  a) storing into a newly-allocated lisp frame on the stack.
1592  b) marking a newly-allocated TSP frame as containing "raw" data.
1593  c) consing: the GC has its own ideas about how this should be
1594     handled, but other callers would be best advised to back
1595     up or move forward, according to whether we're in the middle
1596     of allocating a cons cell or allocating a uvector.
1597  d) a STMW to the vsp
1598  e) EGC write-barrier subprims.
1599*/
1600
1601extern opcode
1602  egc_write_barrier_start,
1603  egc_write_barrier_end, 
1604  egc_store_node_conditional, 
1605  egc_store_node_conditional_test,
[15747]1606  egc_set_hash_key, egc_set_hash_key_did_store,
1607  egc_gvset, egc_gvset_did_store,
1608  egc_rplaca, egc_rplaca_did_store,
1609  egc_rplacd, egc_rplacd_did_store,
[14119]1610  egc_set_hash_key_conditional,
[14791]1611  egc_set_hash_key_conditional_test,
1612  swap_lr_lisp_frame_temp0,
1613  swap_lr_lisp_frame_arg_z;
[14119]1614
1615
1616extern opcode ffcall_return_window, ffcall_return_window_end;
1617
/*
 * "Luser" (adjust) an interrupted thread's context so it is not
 * stopped in the middle of an atomic sequence.  See the block comment
 * above for the cases handled: EGC write-barrier subprims, consing,
 * and the swap_lr_lisp_frame trampolines.  For consing, if alloc_disp
 * is non-NULL the caller is an interrupt handler that will undo the
 * allocptr displacement itself; if NULL, the allocation is either
 * backed out or completed here.
 */
void
pc_luser_xp(ExceptionInformation *xp, TCR *tcr, signed_natural *alloc_disp)
{
  pc program_counter = xpPC(xp);
  opcode instr = *program_counter;
  lisp_frame *frame = (lisp_frame *)ptr_from_lispobj(xpGPR(xp,Rsp));  /* apparently unused -- retained from older code */
  LispObj cur_allocptr = xpGPR(xp, allocptr);
  int allocptr_tag = fulltag_of(cur_allocptr);
 


  /* Case: interrupted inside an EGC write-barrier subprim.  Either
     back out (if the store hasn't happened) or complete the memoization
     and return to the subprim's caller. */
  if ((program_counter < &egc_write_barrier_end) && 
      (program_counter >= &egc_write_barrier_start)) {
    LispObj *ea = 0, val = 0, root = 0;
    bitvector refbits = (bitvector)(lisp_global(REFBITS));
    Boolean need_check_memo = true, need_memoize_root = false;

    if (program_counter >= &egc_set_hash_key_conditional) {
      /* Conditional store: imm0 != 0 at the test means the store
         failed (or it hasn't been attempted); nothing to do. */
      if ((program_counter < &egc_set_hash_key_conditional_test) ||
          ((program_counter == &egc_set_hash_key_conditional_test) &&
           (xpGPR(xp,imm0) != 0))) {
        return;
      }
      root = xpGPR(xp,arg_x);
      ea = (LispObj *) (root+unbox_fixnum(xpGPR(xp,temp2)));
      need_memoize_root = true;
    } else if (program_counter >= &egc_store_node_conditional) {
      if ((program_counter < &egc_store_node_conditional_test) ||
          ((program_counter == &egc_store_node_conditional_test) &&
           (xpGPR(xp,imm0) != 0))) {
        /* The conditional store either hasn't been attempted yet, or
           has failed.  No need to adjust the PC, or do memoization. */
        return;
      }
      ea = (LispObj*)(xpGPR(xp,arg_x) + unbox_fixnum(xpGPR(xp,temp2)));
      xpGPR(xp,arg_z) = t_value;  /* successful store returns T */
    } else if (program_counter >= &egc_set_hash_key) {
      if (program_counter < &egc_set_hash_key_did_store) {
        return;                   /* store not yet performed */
      }
      root = xpGPR(xp,arg_x);
      val = xpGPR(xp,arg_z);
      ea = (LispObj *) (root+xpGPR(xp,arg_y)+misc_data_offset);
      need_memoize_root = true;
    } else if (program_counter >= &egc_gvset) {
      if (program_counter < &egc_gvset_did_store) {
        return;
      } 
      ea = (LispObj *) (xpGPR(xp,arg_x)+xpGPR(xp,arg_y)+misc_data_offset);
      val = xpGPR(xp,arg_z);
    } else if (program_counter >= &egc_rplacd) {
      if (program_counter < &egc_rplacd_did_store) {
        return;
      } 
      ea = (LispObj *) untag(xpGPR(xp,arg_y));
      val = xpGPR(xp,arg_z);
    } else {                      /* egc_rplaca */
      if (program_counter < &egc_rplaca_did_store) {
        return;
      } 
      ea =  ((LispObj *) untag(xpGPR(xp,arg_y)))+1;
      val = xpGPR(xp,arg_z);
    }
    /* The store happened: set the refbits the subprim would have set
       (only needed for an old->new pointer). */
    if (need_check_memo) {
      natural  bitnumber = area_dnode(ea, lisp_global(REF_BASE));
      if ((bitnumber < lisp_global(OLDSPACE_DNODE_COUNT)) &&
          ((LispObj)ea < val)) {
        atomic_set_bit(refbits, bitnumber);
        atomic_set_bit(global_refidx, bitnumber>>8);
        if (need_memoize_root) {
          bitnumber = area_dnode(root, lisp_global(REF_BASE));
          atomic_set_bit(refbits, bitnumber);
          atomic_set_bit(global_refidx,bitnumber>>8);
        }
      }
    }
    /* Resume at the subprim's caller; the subprim's work is done. */
    xpPC(xp) = xpLR(xp);
    return;
  }


 
  /* Case: interrupted while consing (allocptr is tagged). */
  if (allocptr_tag != tag_fixnum) {
    signed_natural disp = allocptr_displacement(xp);

    if (disp) {
      /* Being architecturally "at" the alloc trap doesn't tell
         us much (in particular, it doesn't tell us whether
         or not the thread has committed to taking the trap
         and is waiting for the exception lock (or waiting
         for the Mach exception thread to tell it how bad
         things are) or is about to execute a conditional
         trap.
         Regardless of which case applies, we want the
         other thread to take (or finish taking) the
         trap, and we don't want it to consider its
         current allocptr to be valid.
         The difference between this case (suspend other
         thread for GC) and the previous case (suspend
         current thread for interrupt) is solely a
         matter of what happens after we leave this
         function: some non-current thread will stay
         suspended until the GC finishes, then take
         (or start processing) the alloc trap.   The
         current thread will go off and do PROCESS-INTERRUPT
         or something, and may return from the interrupt
         and need to finish the allocation that got interrupted.
      */

      if (alloc_disp) {
        *alloc_disp = disp;
        xpGPR(xp,allocptr) -= disp;
        /* Leave the PC at the alloc trap.  When the interrupt
           handler returns, it'll decrement allocptr by disp
           and the trap may or may not be taken.
        */
      } else {
        Boolean ok = false;
        update_bytes_allocated(tcr, (void *) ptr_from_lispobj(cur_allocptr - disp));
        xpGPR(xp, allocptr) = VOID_ALLOCPTR + disp;
        instr = program_counter[-1];
        if (IS_BRANCH_AROUND_ALLOC_TRAP(instr)) {
          instr = program_counter[-2];
          if (IS_COMPARE_ALLOCPTR_TO_RM(instr)){
            /* Void the register that was compared against allocptr so
               the re-executed comparison forces the trap. */
            xpGPR(xp,RM_field(instr)) = VOID_ALLOCPTR;
            ok = true;
          }
        }
        if (ok) {
          /* Clear the carry bit, so that the trap will be taken. */
          xpPSR(xp) &= ~PSR_C_MASK;
        } else {
          Bug(NULL, "unexpected instruction preceding alloc trap.");
        }
      }
    } else {
      /* we may be before or after the alloc trap.  If before, set
         allocptr to VOID_ALLOCPTR and back up to the start of the
         instruction sequence; if after, finish the allocation. */
      Boolean before_alloc_trap = false;

      /* Walk backwards through the known allocation-sequence prologue
         instructions; each match means we haven't trapped yet. */
      if (IS_BRANCH_AROUND_ALLOC_TRAP(instr)) {
        before_alloc_trap = true;
        --program_counter;
        instr = *program_counter;
      }
      if (IS_COMPARE_ALLOCPTR_TO_RM(instr)) {
        before_alloc_trap = true;
        --program_counter;
        instr = *program_counter;
      }
      if (IS_LOAD_RD_FROM_ALLOCBASE(instr)) {
        before_alloc_trap = true;
        --program_counter;
        instr = *program_counter;
      }
      if (IS_SUB_HI_FROM_ALLOCPTR(instr)) {
        before_alloc_trap = true;
        --program_counter;
      }
      if (before_alloc_trap) {
        /* Restart the whole allocation sequence later. */
        xpPC(xp) = program_counter;
        xpGPR(xp,allocptr) = VOID_ALLOCPTR;
      } else {
        /* If we're already past the alloc_trap, finish allocating
           the object. */
        if (allocptr_tag == fulltag_cons) {
          finish_allocating_cons(xp);
        } else {
          if (allocptr_tag == fulltag_misc) {
            finish_allocating_uvector(xp);
          } else {
            Bug(xp, "what's being allocated here ?");
          }
        }
        /* Whatever we finished allocating, reset allocptr/allocbase to
           VOID_ALLOCPTR */
        xpGPR(xp,allocptr) = VOID_ALLOCPTR;
        tcr->save_allocptr = tcr->save_allocbase = (LispObj *)VOID_ALLOCPTR;
      }
      return;
    }
    return;
  }
  /* Case: interrupted in a swap_lr_lisp_frame trampoline (3-instruction
     sequences that save LR into a frame and load a new LR from imm0):
     complete the sequence by hand and resume after it. */
  {
    lisp_frame *swap_frame = NULL;
    pc base = &swap_lr_lisp_frame_temp0;
   
    if ((program_counter >base)             /* sic */
        && (program_counter < (base+3))) {
      swap_frame = (lisp_frame *)xpGPR(xp,temp0);
    } else {
      base = &swap_lr_lisp_frame_arg_z;
      if ((program_counter > base) && (program_counter < (base+3))) { 
        swap_frame = (lisp_frame *)xpGPR(xp,arg_z);
      }
    }
    if (swap_frame) {
      if (program_counter == (base+1)) {
        /* The save of LR hasn't happened yet: do it. */
        swap_frame->savelr = xpGPR(xp,Rlr);
      }
      xpGPR(xp,Rlr) = xpGPR(xp,imm0);
      xpPC(xp) = base+3;
      return;
    }
  }
}
1825
/*
 * Handler for SIGNAL_FOR_PROCESS_INTERRUPT.  If the thread cannot
 * take an interrupt right now (interrupt level negative, running
 * foreign code, or unwinding), just record that one is pending;
 * otherwise nudge the thread out of any partially-complete atomic
 * sequence and call back into lisp via handle_exception() like any
 * other exception.
 */
void
interrupt_handler (int signum, siginfo_t *info, ExceptionInformation *context)
{
  TCR *tcr = get_interrupt_tcr(false);
  if (tcr) {
    if (TCR_INTERRUPT_LEVEL(tcr) < 0) {
      /* Interrupts disabled at the lisp level: defer. */
      tcr->interrupt_pending = 1 << fixnumshift;
    } else {
      LispObj cmain = nrs_CMAIN.vcell;

      if ((fulltag_of(cmain) == fulltag_misc) &&
          (header_subtag(header_of(cmain)) == subtag_macptr)) {
        /*
           This thread can (allegedly) take an interrupt now.
           It's tricky to do that if we're executing
           foreign code (especially Linuxthreads code, much
           of which isn't reentrant.)
           If we're unwinding the stack, we also want to defer
           the interrupt.
        */
        if ((tcr->valence != TCR_STATE_LISP) ||
            (tcr->unwinding != 0)) {
          tcr->interrupt_pending = 1 << fixnumshift;
        } else {
          xframe_list xframe_link;
          int old_valence;
          signed_natural disp=0;
          natural old_last_lisp_frame = tcr->last_lisp_frame;
         
          tcr->last_lisp_frame = xpGPR(context,Rsp);
          /* Back the thread out of any atomic sequence (consing,
             write barrier); disp records a pending allocptr offset. */
          pc_luser_xp(context, tcr, &disp);
          old_valence = prepare_to_wait_for_exception_lock(tcr, context);
          wait_for_exception_lock_in_handler(tcr, context, &xframe_link);
          handle_exception(signum, context, tcr, info, old_valence);
          if (disp) {
            /* Reapply the allocptr displacement pc_luser_xp removed. */
            xpGPR(context,allocptr) -= disp;
          }
          unlock_exception_lock_in_handler(tcr);
          exit_signal_handler(tcr, old_valence, old_last_lisp_frame);
        }
      }
    }
  }
#ifdef DARWIN
    DarwinSigReturn(context);
#endif
}
1873
1874#ifdef USE_SIGALTSTACK
/*
 * Altstack trampoline: run interrupt_handler() on the thread's main
 * stack instead of the sigaltstack this handler was entered on.
 */
void
altstack_interrupt_handler(int signum, siginfo_t *info, ExceptionInformation *context)
{
  invoke_handler_on_main_stack(signum, info, context, __builtin_return_address(0),interrupt_handler);
}
1880#endif
1881
1882
1883void
[14873]1884install_signal_handler(int signo, void *handler, unsigned flags)
[14119]1885{
1886  struct sigaction sa;
[14873]1887  int err;
[14549]1888
1889  sigfillset(&sa.sa_mask);
[14119]1890 
1891  sa.sa_sigaction = (void *)handler;
1892  sigfillset(&sa.sa_mask);
[14873]1893  sa.sa_flags = SA_SIGINFO;
1894
1895#ifdef ANDROID
[15025]1896  sa.sa_flags |= SA_NODEFER;
[14873]1897#endif
[14119]1898#ifdef USE_SIGALTSTACK
[14873]1899  if (flags & ON_ALTSTACK)
1900    sa.sa_flags |= SA_ONSTACK;
[14119]1901#endif
[14873]1902  if (flags & RESTART_SYSCALLS)
1903    sa.sa_flags |= SA_RESTART;
1904  if (flags & RESERVE_FOR_LISP) {
1905    extern sigset_t user_signals_reserved;
1906    sigaddset(&user_signals_reserved, signo);
1907  }
[14119]1908
[14873]1909  err = sigaction(signo, &sa, NULL);
1910  if (err) {
1911    perror("sigaction");
1912    exit(1);
1913  }
[14119]1914}
1915
1916
/*
 * Install the lisp kernel's signal handlers: SIGILL for UUOs and
 * other illegal instructions; SIGSEGV and SIGBUS (on the alternate
 * stack, when configured) for memory faults and stack overflow; the
 * process-interrupt signal; and SIGPIPE ignored.
 */
void
install_pmcl_exception_handlers()
{
  install_signal_handler(SIGILL, (void *)sigill_handler, RESERVE_FOR_LISP);
  install_signal_handler(SIGSEGV, (void *)ALTSTACK(signal_handler),
                         RESERVE_FOR_LISP|ON_ALTSTACK);
  install_signal_handler(SIGBUS, (void *)ALTSTACK(signal_handler),
                           RESERVE_FOR_LISP|ON_ALTSTACK);
  install_signal_handler(SIGNAL_FOR_PROCESS_INTERRUPT,
                         (void *)interrupt_handler, RESERVE_FOR_LISP);
  signal(SIGPIPE, SIG_IGN);
}
1929
[15093]1930
[14119]1931#ifdef USE_SIGALTSTACK
1932void
1933setup_sigaltstack(area *a)
1934{
1935  stack_t stack;
1936#if 0
1937  stack.ss_sp = a->low;
1938  a->low += SIGSTKSZ*8;
1939#endif
1940  stack.ss_size = SIGSTKSZ*8;
1941  stack.ss_flags = 0;
1942  stack.ss_sp = mmap(NULL,stack.ss_size, PROT_READ|PROT_WRITE|PROT_EXEC,MAP_ANON|MAP_PRIVATE,-1,0);
1943  if (sigaltstack(&stack, NULL) != 0) {
1944    perror("sigaltstack");
1945    exit(-1);
1946  }
1947}
1948#endif
1949
/*
 * Handler for SIG_KILL_THREAD: tear the current thread down.  Marks
 * the TCR as foreign and empties its value/control stack areas (so
 * the GC won't scan stale frames), then unblocks all signals and
 * exits the thread.
 *
 * NOTE(review): the second parameter is declared "siginfo_t info"
 * (by value) while every other handler here takes "siginfo_t *".
 * The handler is installed through a (void *) cast and never uses
 * the parameter, so this is harmless, but it looks like a typo --
 * confirm before relying on it.
 */
void
thread_kill_handler(int signum, siginfo_t info, ExceptionInformation *xp)
{
  TCR *tcr = get_tcr(false);
  area *a;
#ifndef ANDROID
  sigset_t mask;
 
  sigemptyset(&mask);
#else
  /* Android: an all-zeros bit vector stands in for an empty sigset_t
     (see the rt_sigprocmask wrapper above). */
  int mask[] = {0,0};
#endif

  if (tcr) {
    tcr->valence = TCR_STATE_FOREIGN;
    a = tcr->vs_area;
    if (a) {
      a->active = a->high;
    }
    a = tcr->cs_area;
    if (a) {
      a->active = a->high;
    }
  }
 
  /* Unblock everything before exiting the thread. */
  pthread_sigmask(SIG_SETMASK,(sigset_t *)&mask,NULL);
  pthread_exit(NULL);
}
1978
1979#ifdef USE_SIGALTSTACK
/*
 * Altstack trampoline: run thread_kill_handler() on the thread's main
 * stack instead of the sigaltstack this handler was entered on.
 */
void
altstack_thread_kill_handler(int signo, siginfo_t *info, ExceptionInformation *xp)
{
  invoke_handler_on_main_stack(signo, info, xp, __builtin_return_address(0), thread_kill_handler);
}
1985#endif
1986
/*
 * Choose the signals used for thread suspension and termination and
 * install their handlers.  The suspend handler is installed with
 * RESTART_SYSCALLS so syscalls interrupted by a suspend request are
 * restarted rather than failing with EINTR.
 */
void
thread_signal_setup()
{
  thread_suspend_signal = SIG_SUSPEND_THREAD;
  thread_kill_signal = SIG_KILL_THREAD;

  install_signal_handler(thread_suspend_signal, (void *)suspend_resume_handler,
                         RESERVE_FOR_LISP|RESTART_SYSCALLS);
  install_signal_handler(thread_kill_signal, (void *)thread_kill_handler,
                         RESERVE_FOR_LISP);
}
1998
1999
2000
2001void
2002unprotect_all_areas()
2003{
2004  protected_area_ptr p;
2005
2006  for(p = AllProtectedAreas, AllProtectedAreas = NULL; p; p = p->next) {
2007    unprotect_area(p);
2008  }
2009}
2010
/*
  A binding subprim has just done "twlle limit_regno,idx_regno" and
  the trap's been taken.  (That mnemonic is from the PPC port; the ARM
  equivalent trap dispatches here via handle_uuo.)  Extend the tcr's
  tlb so that the index will be in bounds and the new limit will be on
  a page boundary, filling in the new page(s) with
  'no_thread_local_binding_marker'.  Update the tcr fields and the
  registers in the xp and return true if this all works, false
  otherwise.

  Note that the tlb was allocated via malloc, so realloc can do some
  of the hard work.
*/
Boolean
extend_tcr_tlb(TCR *tcr, 
               ExceptionInformation *xp, 
               unsigned idx_regno)
{
  unsigned
    index = (unsigned) (xpGPR(xp,idx_regno)),
    old_limit = tcr->tlb_limit,
    new_limit = align_to_power_of_2(index+1,12),  /* round up to a 4K boundary */
    new_bytes = new_limit-old_limit;
  /* NOTE(review): assumes index >= old_limit (true when we got here
     via the bounds trap); otherwise new_bytes would wrap around. */
  LispObj
    *old_tlb = tcr->tlb_pointer,
    *new_tlb = realloc(old_tlb, new_limit),
    *work;

  if (new_tlb == NULL) {
    /* realloc failed; the old tlb is still intact and valid. */
    return false;
  }
 
  work = (LispObj *) ((BytePtr)new_tlb+old_limit);

  /* Mark every newly-added slot as having no thread-local binding. */
  while (new_bytes) {
    *work++ = no_thread_local_binding_marker;
    new_bytes -= sizeof(LispObj);
  }
  tcr->tlb_pointer = new_tlb;
  tcr->tlb_limit = new_limit;
  return true;
}
2051
2052
2053
/*
 * One-time exception-subsystem initialization: install the lisp
 * kernel's signal handlers.
 */
void
exception_init()
{
  install_pmcl_exception_handlers();
}
2059
2060
2061
2062
2063
[15802]2064
2065
/*
 * Early-startup handler used while probing CPU features: startup code
 * executes probe instructions at feature_check_fpu / feature_check_ldrex
 * (and presumably feature_check_clrex -- declared but not tested here;
 * confirm).  If the trap occurred at one of the first two probes the
 * CPU is recorded as ARMv5, otherwise ARMv6, and execution resumes at
 * the saved LR (skipping the faulting probe).
 */
void
early_signal_handler(int signum, siginfo_t *info, ExceptionInformation  *context)
{
  extern pc feature_check_fpu,feature_check_ldrex,feature_check_clrex;
  extern int arm_architecture_version;
 
  if ((xpPC(context) == feature_check_fpu) ||
      (xpPC(context) == feature_check_ldrex)) {
    arm_architecture_version = 5;
  } else {
    arm_architecture_version = 6;
  }
  xpPC(context) = xpLR(context);
}
Note: See TracBrowser for help on using the repository browser.