1 | |
---|
2 | |
---|
3 | /* |
---|
4 | Copyright (C) 2010 Clozure Associates |
---|
5 | This file is part of Clozure CL. |
---|
6 | |
---|
7 | Clozure CL is licensed under the terms of the Lisp Lesser GNU Public |
---|
8 | License , known as the LLGPL and distributed with Clozure CL as the |
---|
9 | file "LICENSE". The LLGPL consists of a preamble and the LGPL, |
---|
10 | which is distributed with Clozure CL as the file "LGPL". Where these |
---|
11 | conflict, the preamble takes precedence. |
---|
12 | |
---|
13 | Clozure CL is referenced in the preamble as the "LIBRARY." |
---|
14 | |
---|
15 | The LLGPL is also available online at |
---|
16 | http://opensource.franz.com/preamble.html |
---|
17 | */ |
---|
18 | |
---|
19 | #include "lisp.h" |
---|
20 | #include "lisp-exceptions.h" |
---|
21 | #include "lisp_globals.h" |
---|
22 | #include <ctype.h> |
---|
23 | #include <stdio.h> |
---|
24 | #include <stddef.h> |
---|
25 | #include <string.h> |
---|
26 | #include <stdarg.h> |
---|
27 | #include <errno.h> |
---|
28 | #include <stdio.h> |
---|
29 | #ifdef LINUX |
---|
30 | #include <strings.h> |
---|
31 | #include <sys/mman.h> |
---|
32 | #ifndef ANDROID |
---|
33 | #include <fpu_control.h> |
---|
34 | #include <linux/prctl.h> |
---|
35 | #endif |
---|
36 | #endif |
---|
37 | |
---|
38 | #ifdef DARWIN |
---|
39 | #include <sys/mman.h> |
---|
40 | #ifndef SA_NODEFER |
---|
41 | #define SA_NODEFER 0 |
---|
42 | #endif |
---|
43 | #include <sysexits.h> |
---|
44 | |
---|
45 | |
---|
46 | /* a distinguished UUO at a distinguished address */ |
---|
47 | extern void pseudo_sigreturn(ExceptionInformation *); |
---|
48 | #endif |
---|
49 | |
---|
50 | |
---|
51 | #include "threads.h" |
---|
52 | |
---|
53 | #ifdef ANDROID |
---|
54 | #define pthread_sigmask(how,in,out) rt_sigprocmask(how,in,out,8) |
---|
55 | #endif |
---|
56 | |
---|
57 | #ifdef LINUX |
---|
58 | |
---|
/* Intentionally a no-op on Linux/ARM: floating-point exception
   state is left untouched by the kernel on this platform. */
void
enable_fp_exceptions()
{
}
---|
63 | |
---|
/* Intentionally a no-op on Linux/ARM; see enable_fp_exceptions(). */
void
disable_fp_exceptions()
{
}
---|
68 | #endif |
---|
69 | |
---|
70 | /* |
---|
71 | Handle exceptions. |
---|
72 | |
---|
73 | */ |
---|
74 | |
---|
75 | extern LispObj lisp_nil; |
---|
76 | |
---|
77 | extern natural lisp_heap_gc_threshold; |
---|
78 | extern Boolean grow_dynamic_area(natural); |
---|
79 | |
---|
80 | Boolean allocation_enabled = true; |
---|
81 | |
---|
82 | |
---|
83 | |
---|
84 | |
---|
/* VM page size in bytes and its base-2 log; used for page-aligned
   protection and freezing of the dynamic area. */
int
page_size = 4096;

int
log2_page_size = 12;
---|
90 | |
---|
91 | |
---|
92 | |
---|
93 | |
---|
94 | |
---|
95 | /* |
---|
96 | If the PC is pointing to an allocation trap, the previous instruction |
---|
97 | must have decremented allocptr. Return the non-zero amount by which |
---|
98 | allocptr was decremented. |
---|
99 | */ |
---|
signed_natural
allocptr_displacement(ExceptionInformation *xp)
{
  pc program_counter = xpPC(xp);
  opcode instr = *program_counter, prev_instr;
  /* Offset (in instructions, backwards from the PC) of the
     instruction that decremented allocptr. */
  int delta = -3;

  if (IS_ALLOC_TRAP(instr)) {
    /* The alloc trap must have been preceded by a cmp and a
       load from tcr.allocbase. */
    if (IS_BRANCH_AROUND_ALLOC_TRAP(program_counter[-1])) {
      /* An extra branch instruction precedes the trap, so the
         decrement is one instruction further back. */
      delta = -4;
    }
    prev_instr = program_counter[delta];

    /* Register-operand subtract: the amount is in the Rm register. */
    if (IS_SUB_RM_FROM_ALLOCPTR(prev_instr)) {
      return -((signed_natural)xpGPR(xp,RM_field(prev_instr)));
    }

    /* Small immediate subtract: amount is the low 8 bits. */
    if (IS_SUB_LO_FROM_ALLOCPTR(prev_instr)) {
      return -((signed_natural)(prev_instr & 0xff));
    }

    /* Rotated-immediate subtract: decode the ARM immediate (8-bit
       value rotated right by twice the 4-bit rotate field), then
       expect a preceding SUB of the low byte to combine with it. */
    if (IS_SUB_FROM_ALLOCPTR(prev_instr)) {
      natural disp = ror(prev_instr&0xff,(prev_instr&0xf00)>>7);

      instr = program_counter[delta-1];
      if (IS_SUB_LO_FROM_ALLOCPTR(instr)) {
        return -((signed_natural)(disp | (instr & 0xff)));
      }
    }
    /* PC is at an alloc trap but the preceding instructions don't
       match any recognized decrement sequence: kernel bug. */
    Bug(xp, "Can't determine allocation displacement");
  }
  /* Not at an alloc trap at all. */
  return 0;
}
---|
135 | |
---|
136 | |
---|
137 | /* |
---|
138 | A cons cell's been successfully allocated, but the allocptr's |
---|
139 | still tagged (as fulltag_cons, of course.) Emulate any instructions |
---|
140 | that might follow the allocation (stores to the car or cdr, an |
---|
141 | assignment to the "result" gpr) that take place while the allocptr's |
---|
142 | tag is non-zero, advancing over each such instruction. When we're |
---|
143 | done, the cons cell will be allocated and initialized, the result |
---|
144 | register will point to it, the allocptr will be untagged, and |
---|
145 | the PC will point past the instruction that clears the allocptr's |
---|
146 | tag. |
---|
147 | */ |
---|
148 | void |
---|
149 | finish_allocating_cons(ExceptionInformation *xp) |
---|
150 | { |
---|
151 | pc program_counter = xpPC(xp); |
---|
152 | opcode instr; |
---|
153 | LispObj cur_allocptr = xpGPR(xp, allocptr); |
---|
154 | cons *c = (cons *)ptr_from_lispobj(untag(cur_allocptr)); |
---|
155 | int target_reg; |
---|
156 | |
---|
157 | while (1) { |
---|
158 | instr = *program_counter++; |
---|
159 | |
---|
160 | if (IS_CLR_ALLOCPTR_TAG(instr)) { |
---|
161 | xpGPR(xp, allocptr) = untag(cur_allocptr); |
---|
162 | xpPC(xp) = program_counter; |
---|
163 | return; |
---|
164 | } else if (IS_SET_ALLOCPTR_CAR_RD(instr)) { |
---|
165 | c->car = xpGPR(xp,RD_field(instr)); |
---|
166 | } else if (IS_SET_ALLOCPTR_CDR_RD(instr)) { |
---|
167 | c->cdr = xpGPR(xp,RD_field(instr)); |
---|
168 | } else { |
---|
169 | /* assert(IS_SET_ALLOCPTR_RESULT_RD(instr)) */ |
---|
170 | xpGPR(xp,RD_field(instr)) = cur_allocptr; |
---|
171 | } |
---|
172 | } |
---|
173 | } |
---|
174 | |
---|
175 | /* |
---|
176 | We were interrupted in the process of allocating a uvector; we |
---|
177 | survived the allocation trap, and allocptr is tagged as fulltag_misc. |
---|
178 | Emulate any instructions which store a header into the uvector, |
---|
179 | assign the value of allocptr to some other register, and clear |
---|
180 | allocptr's tag. Don't expect/allow any other instructions in |
---|
181 | this environment. |
---|
182 | */ |
---|
183 | void |
---|
184 | finish_allocating_uvector(ExceptionInformation *xp) |
---|
185 | { |
---|
186 | pc program_counter = xpPC(xp); |
---|
187 | opcode instr; |
---|
188 | LispObj cur_allocptr = xpGPR(xp, allocptr); |
---|
189 | int target_reg; |
---|
190 | |
---|
191 | while (1) { |
---|
192 | instr = *program_counter++; |
---|
193 | if (IS_CLR_ALLOCPTR_TAG(instr)) { |
---|
194 | xpGPR(xp, allocptr) = untag(cur_allocptr); |
---|
195 | xpPC(xp) = program_counter; |
---|
196 | return; |
---|
197 | } |
---|
198 | if (IS_SET_ALLOCPTR_HEADER_RD(instr)) { |
---|
199 | header_of(cur_allocptr) = xpGPR(xp,RD_field(instr)); |
---|
200 | } else if (IS_SET_ALLOCPTR_RESULT_RD(instr)) { |
---|
201 | xpGPR(xp,RD_field(instr)) = cur_allocptr; |
---|
202 | } else { |
---|
203 | Bug(xp, "Unexpected instruction following alloc trap at " LISP ":",program_counter); |
---|
204 | } |
---|
205 | } |
---|
206 | } |
---|
207 | |
---|
208 | |
---|
/* Try to make at least bytes_needed bytes available to the thread's
   allocation region, running the EGC and/or a full GC as needed.
   On success, advances allocptr by disp_from_allocptr and returns
   true; returns false if the memory can't be obtained. */
Boolean
allocate_object(ExceptionInformation *xp,
                natural bytes_needed,
                signed_natural disp_from_allocptr,
                TCR *tcr)
{
  area *a = active_dynamic_area;

  /* Maybe do an EGC */
  if (a->older && lisp_global(OLDEST_EPHEMERAL)) {
    if (((a->active)-(a->low)) >= a->threshold) {
      gc_from_xp(xp, 0L);
    }
  }

  /* Life is pretty simple if we can simply grab a segment
     without extending the heap.
  */
  if (new_heap_segment(xp, bytes_needed, false, tcr, NULL)) {
    xpGPR(xp, allocptr) += disp_from_allocptr;
    return true;
  }

  /* It doesn't make sense to try a full GC if the object
     we're trying to allocate is larger than everything
     allocated so far.
  */
  if ((lisp_global(HEAP_END)-lisp_global(HEAP_START)) > bytes_needed) {
    untenure_from_area(tenured_area); /* force a full GC */
    gc_from_xp(xp, 0L);
  }

  /* Try again, growing the heap if necessary */
  if (new_heap_segment(xp, bytes_needed, true, tcr, NULL)) {
    xpGPR(xp, allocptr) += disp_from_allocptr;
    return true;
  }

  return false;
}
---|
249 | |
---|
/* Error code for the "chronically out of memory" condition;
   provide a default if the platform headers didn't define it. */
#ifndef XNOMEM
#define XNOMEM 10
#endif
---|
253 | |
---|
254 | void |
---|
255 | update_bytes_allocated(TCR* tcr, void *cur_allocptr) |
---|
256 | { |
---|
257 | BytePtr |
---|
258 | last = (BytePtr) tcr->last_allocptr, |
---|
259 | current = (BytePtr) cur_allocptr; |
---|
260 | if (last && (cur_allocptr != ((void *)VOID_ALLOCPTR))) { |
---|
261 | tcr->bytes_allocated += last-current; |
---|
262 | } |
---|
263 | tcr->last_allocptr = 0; |
---|
264 | } |
---|
265 | |
---|
266 | void |
---|
267 | lisp_allocation_failure(ExceptionInformation *xp, TCR *tcr, natural bytes_needed) |
---|
268 | { |
---|
269 | /* Couldn't allocate the object. If it's smaller than some arbitrary |
---|
270 | size (say 128K bytes), signal a "chronically out-of-memory" condition; |
---|
271 | else signal a "allocation request failed" condition. |
---|
272 | */ |
---|
273 | xpGPR(xp,allocptr) = xpGPR(xp,allocbase) = VOID_ALLOCPTR; |
---|
274 | handle_error(xp, bytes_needed < (128<<10) ? XNOMEM : error_alloc_failed,0, NULL); |
---|
275 | } |
---|
276 | |
---|
/*
  Allocate a large list, where "large" means "large enough to
  possibly trigger the EGC several times if this was done
  by individually allocating each CONS."  The number of
  conses in question is in arg_z; on successful return,
  the list will be in arg_z.
*/
---|
284 | |
---|
285 | Boolean |
---|
286 | allocate_list(ExceptionInformation *xp, TCR *tcr) |
---|
287 | { |
---|
288 | natural |
---|
289 | nconses = (unbox_fixnum(xpGPR(xp,arg_z))), |
---|
290 | bytes_needed = (nconses << dnode_shift); |
---|
291 | LispObj |
---|
292 | prev = lisp_nil, |
---|
293 | current, |
---|
294 | initial = xpGPR(xp,arg_y); |
---|
295 | |
---|
296 | if (nconses == 0) { |
---|
297 | /* Silly case */ |
---|
298 | xpGPR(xp,arg_z) = lisp_nil; |
---|
299 | xpGPR(xp,allocptr) = lisp_nil; |
---|
300 | return true; |
---|
301 | } |
---|
302 | update_bytes_allocated(tcr, (void *)(void *) tcr->save_allocptr); |
---|
303 | if (allocate_object(xp,bytes_needed,(-bytes_needed)+fulltag_cons,tcr)) { |
---|
304 | for (current = xpGPR(xp,allocptr); |
---|
305 | nconses; |
---|
306 | prev = current, current+= dnode_size, nconses--) { |
---|
307 | deref(current,0) = prev; |
---|
308 | deref(current,1) = initial; |
---|
309 | } |
---|
310 | xpGPR(xp,arg_z) = prev; |
---|
311 | xpGPR(xp,arg_y) = xpGPR(xp,allocptr); |
---|
312 | xpGPR(xp,allocptr)-=fulltag_cons; |
---|
313 | } else { |
---|
314 | lisp_allocation_failure(xp,tcr,bytes_needed); |
---|
315 | } |
---|
316 | return true; |
---|
317 | } |
---|
318 | |
---|
319 | Boolean |
---|
320 | handle_alloc_trap(ExceptionInformation *xp, TCR *tcr) |
---|
321 | { |
---|
322 | pc program_counter; |
---|
323 | natural cur_allocptr, bytes_needed = 0; |
---|
324 | opcode prev_instr; |
---|
325 | signed_natural disp = 0; |
---|
326 | unsigned allocptr_tag; |
---|
327 | |
---|
328 | if (!allocation_enabled) { |
---|
329 | /* Back up before the alloc_trap, then let pc_luser_xp() back |
---|
330 | up some more. */ |
---|
331 | xpPC(xp)-=1; |
---|
332 | pc_luser_xp(xp,tcr, NULL); |
---|
333 | allocation_enabled = true; |
---|
334 | tcr->save_allocbase = (void *)VOID_ALLOCPTR; |
---|
335 | handle_error(xp, error_allocation_disabled,0,NULL); |
---|
336 | return true; |
---|
337 | } |
---|
338 | |
---|
339 | cur_allocptr = xpGPR(xp,allocptr); |
---|
340 | |
---|
341 | allocptr_tag = fulltag_of(cur_allocptr); |
---|
342 | |
---|
343 | switch (allocptr_tag) { |
---|
344 | case fulltag_cons: |
---|
345 | bytes_needed = sizeof(cons); |
---|
346 | disp = -sizeof(cons) + fulltag_cons; |
---|
347 | break; |
---|
348 | |
---|
349 | case fulltag_misc: |
---|
350 | disp = allocptr_displacement(xp); |
---|
351 | bytes_needed = (-disp) + fulltag_misc; |
---|
352 | break; |
---|
353 | |
---|
354 | /* else fall thru */ |
---|
355 | default: |
---|
356 | return false; |
---|
357 | } |
---|
358 | |
---|
359 | update_bytes_allocated(tcr,((BytePtr)(cur_allocptr-disp))); |
---|
360 | if (allocate_object(xp, bytes_needed, disp, tcr)) { |
---|
361 | adjust_exception_pc(xp,4); |
---|
362 | return true; |
---|
363 | } |
---|
364 | lisp_allocation_failure(xp,tcr,bytes_needed); |
---|
365 | return true; |
---|
366 | } |
---|
367 | |
---|
368 | natural gc_deferred = 0, full_gc_deferred = 0; |
---|
369 | |
---|
/* Per-thread worker passed to gc_like_from_xp() for
   GC_TRAP_FUNCTION_FLASH_FREEZE; no per-thread work is needed here,
   the interesting state changes happen in the caller. */
signed_natural
flash_freeze(TCR *tcr, signed_natural param)
{
  return 0;
}
---|
375 | |
---|
/* Dispatch a GC trap.  The operation selector arrives in imm0 and an
   operation-specific argument in imm1; results are returned to lisp
   in imm0 or arg_z depending on the operation.  Always resumes past
   the 4-byte trap instruction and returns true. */
Boolean
handle_gc_trap(ExceptionInformation *xp, TCR *tcr)
{
  LispObj
    selector = xpGPR(xp,imm0),
    arg = xpGPR(xp,imm1);
  area *a = active_dynamic_area;
  /* A non-NULL 'older' link means ephemeral generations exist,
     i.e. the EGC is currently enabled. */
  Boolean egc_was_enabled = (a->older != NULL);
  natural gc_previously_deferred = gc_deferred;


  switch (selector) {
  case GC_TRAP_FUNCTION_EGC_CONTROL:
    /* Enable/disable the EGC; return the previous state (T or NIL). */
    egc_control(arg != 0, a->active);
    xpGPR(xp,arg_z) = lisp_nil + (egc_was_enabled ? t_offset : 0);
    break;

  case GC_TRAP_FUNCTION_CONFIGURE_EGC:
    /* Set the three ephemeral-generation thresholds from
       arg_x/arg_y/arg_z; returns T. */
    a->threshold = unbox_fixnum(xpGPR(xp, arg_x));
    g1_area->threshold = unbox_fixnum(xpGPR(xp, arg_y));
    g2_area->threshold = unbox_fixnum(xpGPR(xp, arg_z));
    xpGPR(xp,arg_z) = lisp_nil+t_offset;
    break;

  case GC_TRAP_FUNCTION_SET_LISP_HEAP_THRESHOLD:
    /* Round the requested threshold up to a whole number of heap
       segments before storing it. */
    if (((signed_natural) arg) > 0) {
      lisp_heap_gc_threshold =
        align_to_power_of_2((arg-1) +
                            (heap_segment_size - 1),
                            log2_heap_segment_size);
    }
    /* fall through */
  case GC_TRAP_FUNCTION_GET_LISP_HEAP_THRESHOLD:
    xpGPR(xp, imm0) = lisp_heap_gc_threshold;
    break;

  case GC_TRAP_FUNCTION_USE_LISP_HEAP_THRESHOLD:
    /* Try to put the current threshold in effect.  This may
       need to disable/reenable the EGC. */
    untenure_from_area(tenured_area);
    resize_dynamic_heap(a->active,lisp_heap_gc_threshold);
    if (egc_was_enabled) {
      if ((a->high - a->active) >= a->threshold) {
        tenure_to_area(tenured_area);
      }
    }
    xpGPR(xp, imm0) = lisp_heap_gc_threshold;
    break;

  case GC_TRAP_FUNCTION_ENSURE_STATIC_CONSES:
    ensure_static_conses(xp,tcr,32768);
    break;

  case GC_TRAP_FUNCTION_FLASH_FREEZE:
    /* Make everything currently allocated static: suspend all
       threads (via gc_like_from_xp with a no-op worker), page-align
       the active pointer, and extend the tenured area's static
       dnodes to cover it.  Returns the frozen size in bytes. */
    untenure_from_area(tenured_area);
    gc_like_from_xp(xp,flash_freeze,0);
    a->active = (BytePtr) align_to_power_of_2(a->active, log2_page_size);
    tenured_area->static_dnodes = area_dnode(a->active, a->low);
    if (egc_was_enabled) {
      tenure_to_area(tenured_area);
    }
    xpGPR(xp, imm0) = tenured_area->static_dnodes << dnode_shift;
    break;

  case GC_TRAP_FUNCTION_ALLOCATION_CONTROL:
    switch(arg) {
    case 0: /* disable if allocation enabled */
      xpGPR(xp, arg_z) = lisp_nil;
      if (allocation_enabled) {
        TCR *other_tcr;
        ExceptionInformation *other_context;
        /* All threads must be normalized (allocation state made
           consistent) before allocation can be globally disabled. */
        suspend_other_threads(true);
        normalize_tcr(xp,tcr,false);
        for (other_tcr=tcr->next; other_tcr != tcr; other_tcr = other_tcr->next) {
          other_context = other_tcr->pending_exception_context;
          if (other_context == NULL) {
            other_context = other_tcr->suspend_context;
          }
          normalize_tcr(other_context, other_tcr, true);
        }
        allocation_enabled = false;
        xpGPR(xp, arg_z) = t_value;
        resume_other_threads(true);
      }
      break;

    case 1: /* enable if disabled */
      xpGPR(xp, arg_z) = lisp_nil;
      if (!allocation_enabled) {
        allocation_enabled = true;
        xpGPR(xp, arg_z) = t_value;
      }
      break;

    default:
      /* Any other argument just queries the current state. */
      xpGPR(xp, arg_z) = lisp_nil;
      if (allocation_enabled) {
        xpGPR(xp, arg_z) = t_value;
      }
      break;
    }
    break;


  default:
    /* The remaining selectors all run some form of GC. */
    update_bytes_allocated(tcr, (void *) ptr_from_lispobj(xpGPR(xp, allocptr)));

    if (selector == GC_TRAP_FUNCTION_IMMEDIATE_GC) {
      if (!full_gc_deferred) {
        gc_from_xp(xp, 0L);
        break;
      }
      /* Tried to do a full GC when gc was disabled. That failed,
         so try full GC now */
      selector = GC_TRAP_FUNCTION_GC;
    }

    /* A full GC wants the EGC off for its duration. */
    if (egc_was_enabled) {
      egc_control(false, (BytePtr) a->active);
    }
    gc_from_xp(xp, 0L);
    /* If the GC count went up while we ran, the GC was deferred
       (inhibited); remember that a full GC is still owed. */
    if (gc_deferred > gc_previously_deferred) {
      full_gc_deferred = 1;
    } else {
      full_gc_deferred = 0;
    }
    if (selector > GC_TRAP_FUNCTION_GC) {
      /* Selector bits above plain GC request purify/impurify/
         save-application variants. */
      if (selector & GC_TRAP_FUNCTION_IMPURIFY) {
        impurify_from_xp(xp, 0L);
        /* nrs_GC_EVENT_STATUS_BITS.vcell |= gc_integrity_check_bit; */
        lisp_global(OLDSPACE_DNODE_COUNT) = 0;
        gc_from_xp(xp, 0L);
      }
      if (selector & GC_TRAP_FUNCTION_PURIFY) {
        purify_from_xp(xp, 0L);
        lisp_global(OLDSPACE_DNODE_COUNT) = 0;
        gc_from_xp(xp, 0L);
      }
      if (selector & GC_TRAP_FUNCTION_SAVE_APPLICATION) {
        OSErr err;
        extern OSErr save_application(unsigned, Boolean);
        TCR *tcr = get_tcr(true);
        area *vsarea = tcr->vs_area;

        /* Record the toplevel function (deepest value on the value
           stack) so the saved image knows where to start. */
        nrs_TOPLFUNC.vcell = *((LispObj *)(vsarea->high)-1);
        err = save_application(arg, egc_was_enabled);
        if (err == noErr) {
          _exit(0);
        }
        fatal_oserr(": save_application", err);
      }
      switch (selector) {


      case GC_TRAP_FUNCTION_FREEZE:
        /* Like flash-freeze, but after a full GC (above). */
        a->active = (BytePtr) align_to_power_of_2(a->active, log2_page_size);
        tenured_area->static_dnodes = area_dnode(a->active, a->low);
        xpGPR(xp, imm0) = tenured_area->static_dnodes << dnode_shift;
        break;
      default:
        break;
      }
    }

    if (egc_was_enabled) {
      egc_control(true, NULL);
    }
    break;

  }

  /* Resume past the 4-byte trap instruction. */
  adjust_exception_pc(xp,4);
  return true;
}
---|
550 | |
---|
551 | |
---|
552 | |
---|
/* A stack (indicated by reg) hit its soft-overflow guard page.
   Signal an error_stack_overflow condition to lisp, identifying
   the overflowing stack by its register. */
void
signal_stack_soft_overflow(ExceptionInformation *xp, unsigned reg)
{
#if 0
  /* Disabled: would force subsequent control-stack overflow checks
     to fail until the stacks recover. */
  lisp_global(CS_OVERFLOW_LIMIT) = CS_OVERFLOW_FORCE_LIMIT; /* force unsigned traps to fail */
#endif
  handle_error(xp, error_stack_overflow, reg, NULL);
}
---|
566 | |
---|
/*
  Lower (move toward 0) the "end" of the soft protected area
  associated with area `a` by one page, if we can.
*/
---|
571 | |
---|
572 | void |
---|
573 | adjust_soft_protection_limit(area *a) |
---|
574 | { |
---|
575 | char *proposed_new_soft_limit = a->softlimit - 4096; |
---|
576 | protected_area_ptr p = a->softprot; |
---|
577 | |
---|
578 | if (proposed_new_soft_limit >= (p->start+16384)) { |
---|
579 | p->end = proposed_new_soft_limit; |
---|
580 | p->protsize = p->end-p->start; |
---|
581 | a->softlimit = proposed_new_soft_limit; |
---|
582 | } |
---|
583 | protect_area(p); |
---|
584 | } |
---|
585 | |
---|
586 | void |
---|
587 | restore_soft_stack_limit(unsigned stkreg) |
---|
588 | { |
---|
589 | area *a; |
---|
590 | TCR *tcr = get_tcr(true); |
---|
591 | |
---|
592 | switch (stkreg) { |
---|
593 | case Rsp: |
---|
594 | a = tcr->cs_area; |
---|
595 | if ((a->softlimit - 4096) > (a->hardlimit + 16384)) { |
---|
596 | a->softlimit -= 4096; |
---|
597 | } |
---|
598 | tcr->cs_limit = (LispObj)ptr_to_lispobj(a->softlimit); |
---|
599 | break; |
---|
600 | case vsp: |
---|
601 | a = tcr->vs_area; |
---|
602 | adjust_soft_protection_limit(a); |
---|
603 | break; |
---|
604 | } |
---|
605 | } |
---|
606 | |
---|
607 | /* Maybe this'll work someday. We may have to do something to |
---|
608 | make the thread look like it's not handling an exception */ |
---|
/* Intended to unwind a thread back to its most recent catch frame
   and restart lisp execution there; currently disabled (no-op).
   See the note above: the thread may need to be made to look like
   it's not handling an exception first. */
void
reset_lisp_process(ExceptionInformation *xp)
{
#if 0
  TCR *tcr = get_tcr(true);
  catch_frame *last_catch = (catch_frame *) ptr_from_lispobj(untag(tcr->catch_top));

  tcr->save_allocptr = (void *) ptr_from_lispobj(xpGPR(xp, allocptr));

  tcr->save_vsp = (LispObj *) ptr_from_lispobj(((lisp_frame *)ptr_from_lispobj(last_catch->csp))->savevsp);

  start_lisp(tcr, 1);
#endif
}
---|
623 | |
---|
624 | |
---|
625 | |
---|
626 | void |
---|
627 | platform_new_heap_segment(ExceptionInformation *xp, TCR *tcr, BytePtr low, BytePtr high) |
---|
628 | { |
---|
629 | tcr->last_allocptr = (void *)newlimit; |
---|
630 | xpGPR(xp,allocptr) = (LispObj) newlimit; |
---|
631 | tcr->save_allocbase = (void*) oldlimit; |
---|
632 | } |
---|
633 | |
---|
634 | void |
---|
635 | update_area_active (area **aptr, BytePtr value) |
---|
636 | { |
---|
637 | area *a = *aptr; |
---|
638 | for (; a; a = a->older) { |
---|
639 | if ((a->low <= value) && (a->high >= value)) break; |
---|
640 | }; |
---|
641 | if (a == NULL) Bug(NULL, "Can't find active area"); |
---|
642 | a->active = value; |
---|
643 | *aptr = a; |
---|
644 | |
---|
645 | for (a = a->younger; a; a = a->younger) { |
---|
646 | a->active = a->high; |
---|
647 | } |
---|
648 | } |
---|
649 | |
---|
650 | LispObj * |
---|
651 | tcr_frame_ptr(TCR *tcr) |
---|
652 | { |
---|
653 | ExceptionInformation *xp; |
---|
654 | LispObj *bp = NULL; |
---|
655 | |
---|
656 | if (tcr->pending_exception_context) |
---|
657 | xp = tcr->pending_exception_context; |
---|
658 | else { |
---|
659 | xp = tcr->suspend_context; |
---|
660 | } |
---|
661 | if (xp) { |
---|
662 | bp = (LispObj *) xpGPR(xp, Rsp); |
---|
663 | } |
---|
664 | return bp; |
---|
665 | } |
---|
666 | |
---|
/* Bring a thread's TCR into a consistent state for GC: finish or
   back out any in-progress allocation, account for bytes allocated,
   reset the allocation registers to the void sentinel, and record
   the current stack frontiers in the thread's stack areas.  xp is
   the context to normalize from (NULL if the thread is in an
   ff-call); is_other_tcr is true for threads other than the caller. */
void
normalize_tcr(ExceptionInformation *xp, TCR *tcr, Boolean is_other_tcr)

{
  void *cur_allocptr = NULL;
  LispObj freeptr = 0;

  if (xp) {
    if (is_other_tcr) {
      /* The other thread may be mid-allocation; let pc_luser_xp()
         sort out its PC/allocptr state first. */
      pc_luser_xp(xp, tcr, NULL);
      freeptr = xpGPR(xp, allocptr);
      /* An untagged allocptr means no allocation is in progress;
         it's safe to account from it. */
      if (fulltag_of(freeptr) == 0){
        cur_allocptr = (void *) ptr_from_lispobj(freeptr);
      }
    }
    update_area_active((area **)&tcr->cs_area, (BytePtr) ptr_from_lispobj(xpGPR(xp, Rsp)));
    update_area_active((area **)&tcr->vs_area, (BytePtr) ptr_from_lispobj(xpGPR(xp, vsp)));
  } else {
    /* In ff-call. */
    cur_allocptr = (void *) (tcr->save_allocptr);
    update_area_active((area **)&tcr->vs_area, (BytePtr) tcr->save_vsp);
    update_area_active((area **)&tcr->cs_area, (BytePtr) tcr->last_lisp_frame);
  }


  tcr->save_allocptr = tcr->save_allocbase = (void *)VOID_ALLOCPTR;
  if (cur_allocptr) {
    update_bytes_allocated(tcr, cur_allocptr);
    if (freeptr) {
      /* Void the context's allocptr so the thread re-faults and
         obtains a fresh segment when it next allocates. */
      xpGPR(xp, allocptr) = VOID_ALLOCPTR;
    }
  }
}
---|
700 | |
---|
701 | TCR *gc_tcr = NULL; |
---|
702 | |
---|
703 | /* Suspend and "normalize" other tcrs, then call a gc-like function |
---|
704 | in that context. Resume the other tcrs, then return what the |
---|
705 | function returned */ |
---|
706 | |
---|
/* Suspend and "normalize" other tcrs, then call a gc-like function
   in that context.  Resume the other tcrs, then return what the
   function returned.  If GC is inhibited (GC_INHIBIT_COUNT != 0),
   the operation is deferred: gc_deferred is bumped and 0 returned. */
signed_natural
gc_like_from_xp(ExceptionInformation *xp,
                signed_natural(*fun)(TCR *, signed_natural),
                signed_natural param)
{
  TCR *tcr = get_tcr(true), *other_tcr;
  int result;
  signed_natural inhibit;

  suspend_other_threads(true);
  inhibit = (signed_natural)(lisp_global(GC_INHIBIT_COUNT));
  if (inhibit != 0) {
    if (inhibit > 0) {
      /* Negate the count to record that a GC was requested while
         inhibited (so it can run when inhibition is released). */
      lisp_global(GC_INHIBIT_COUNT) = (LispObj)(-inhibit);
    }
    resume_other_threads(true);
    gc_deferred++;
    return 0;
  }
  gc_deferred = 0;

  gc_tcr = tcr;

  xpGPR(xp, allocptr) = VOID_ALLOCPTR;

  normalize_tcr(xp, tcr, false);


  /* Pick the best available context for each other thread and
     normalize it too. */
  for (other_tcr = tcr->next; other_tcr != tcr; other_tcr = other_tcr->next) {
    if (other_tcr->pending_exception_context) {
      other_tcr->gc_context = other_tcr->pending_exception_context;
    } else if (other_tcr->valence == TCR_STATE_LISP) {
      other_tcr->gc_context = other_tcr->suspend_context;
    } else {
      /* no pending exception, didn't suspend in lisp state:
         must have executed a synchronous ff-call.
      */
      other_tcr->gc_context = NULL;
    }
    normalize_tcr(other_tcr->gc_context, other_tcr, true);
  }



  result = fun(tcr, param);

  /* Clear every thread's gc_context before resuming. */
  other_tcr = tcr;
  do {
    other_tcr->gc_context = NULL;
    other_tcr = other_tcr->next;
  } while (other_tcr != tcr);

  gc_tcr = NULL;

  resume_other_threads(true);

  return result;

}
---|
766 | |
---|
767 | |
---|
768 | |
---|
769 | /* Returns #bytes freed by invoking GC */ |
---|
770 | |
---|
771 | signed_natural |
---|
772 | gc_from_tcr(TCR *tcr, signed_natural param) |
---|
773 | { |
---|
774 | area *a; |
---|
775 | BytePtr oldfree, newfree; |
---|
776 | BytePtr oldend, newend; |
---|
777 | |
---|
778 | a = active_dynamic_area; |
---|
779 | oldend = a->high; |
---|
780 | oldfree = a->active; |
---|
781 | gc(tcr, param); |
---|
782 | newfree = a->active; |
---|
783 | newend = a->high; |
---|
784 | #if 0 |
---|
785 | fprintf(dbgout, "End GC in 0x%lx\n", tcr); |
---|
786 | #endif |
---|
787 | return ((oldfree-newfree)+(newend-oldend)); |
---|
788 | } |
---|
789 | |
---|
790 | signed_natural |
---|
791 | gc_from_xp(ExceptionInformation *xp, signed_natural param) |
---|
792 | { |
---|
793 | signed_natural status = gc_like_from_xp(xp, gc_from_tcr, param); |
---|
794 | |
---|
795 | freeGCptrs(); |
---|
796 | return status; |
---|
797 | } |
---|
798 | |
---|
/* Run purify (move live data to readonly/static space) with all
   other threads suspended and normalized. */
signed_natural
purify_from_xp(ExceptionInformation *xp, signed_natural param)
{
  return gc_like_from_xp(xp, purify, param);
}
---|
804 | |
---|
/* Run impurify (move purified data back to the dynamic heap) with
   all other threads suspended and normalized. */
signed_natural
impurify_from_xp(ExceptionInformation *xp, signed_natural param)
{
  return gc_like_from_xp(xp, impurify, param);
}
---|
810 | |
---|
811 | |
---|
812 | |
---|
813 | |
---|
814 | |
---|
815 | |
---|
/* Write-fault handlers, indexed by a protected area's 'why' field
   (see handle_protection_violation).  Presumably the order mirrors
   the protected-area kind enum: one spurious kind, three soft-guard
   kinds (e.g. kVSPsoftguard), three hard-guard kinds — TODO confirm
   against the enum's declaration. */
protection_handler
 * protection_handlers[] = {
  do_spurious_wp_fault,
  do_soft_stack_overflow,
  do_soft_stack_overflow,
  do_soft_stack_overflow,
  do_hard_stack_overflow,
  do_hard_stack_overflow,
  do_hard_stack_overflow
};
---|
826 | |
---|
827 | |
---|
/* True if the faulting access was a write.  Tests bit 11 (0x800) of
   the fault status word — presumably the ARM DFSR WnR (write/not
   read) bit; confirm against the target's fault-status layout. */
Boolean
is_write_fault(ExceptionInformation *xp, siginfo_t *info)
{
  return ((xpFaultStatus(xp) & 0x800) != 0);
}
---|
833 | |
---|
/* Handle a memory-protection fault (SIGSEGV/SIGBUS).  Recognizes, in
   order: faults at the thread's registered "safe reference" address
   (skipped, imm0 zeroed); faults inside the touch_page probe
   (redirected to touch_page_end); write faults into a registered
   protected area (dispatched through protection_handlers by the
   area's 'why'); writes into the readonly area (page unprotected);
   otherwise, if the thread was in lisp, the fault is reported to the
   lisp callback in nrs_CMAIN.  Returns true if handled. */
Boolean
handle_protection_violation(ExceptionInformation *xp, siginfo_t *info, TCR *tcr, int old_valence)
{
  BytePtr addr;
  protected_area_ptr area;
  protection_handler *handler;
  extern Boolean touch_page(void *);
  extern void touch_page_end(void);

#ifdef LINUX
  addr = (BytePtr) ((natural) (xpFaultAddress(xp)));
#else
  /* Prefer the siginfo address when the kernel supplied one. */
  if (info) {
    addr = (BytePtr)(info->si_addr);
  } else {
    addr = (BytePtr) ((natural) (xpFaultAddress(xp)));
  }
#endif

  if (addr && (addr == tcr->safe_ref_address)) {
    /* A "safe" reference faulted: skip the instruction and return 0
       in imm0 to the probing code. */
    adjust_exception_pc(xp,4);

    xpGPR(xp,imm0) = 0;
    return true;
  }

  if (xpPC(xp) == (pc)touch_page) {
    /* The touch_page probe faulted: report failure (imm0 = 0) and
       resume at its end. */
    xpGPR(xp,imm0) = 0;
    xpPC(xp) = (pc)touch_page_end;
    return true;
  }


  if (is_write_fault(xp,info)) {
    area = find_protected_area(addr);
    if (area != NULL) {
      handler = protection_handlers[area->why];
      return handler(xp, area, addr);
    } else {
      if ((addr >= readonly_area->low) &&
          (addr < readonly_area->active)) {
        /* Writing into purified (readonly) space: quietly make the
           page writable and retry. */
        UnProtectMemory((LogicalAddress)(truncate_to_power_of_2(addr,log2_page_size)),
                        page_size);
        return true;
      }
    }
  }
  if (old_valence == TCR_STATE_LISP) {
    LispObj cmain = nrs_CMAIN.vcell;

    /* Only call back if CMAIN holds a macptr (a foreign callback). */
    if ((fulltag_of(cmain) == fulltag_misc) &&
        (header_subtag(header_of(cmain)) == subtag_macptr)) {

      callback_for_trap(nrs_CMAIN.vcell, xp, is_write_fault(xp,info)?SIGBUS:SIGSEGV, (natural)addr, NULL);
    }
  }
  return false;
}
---|
892 | |
---|
893 | |
---|
894 | |
---|
895 | |
---|
896 | |
---|
/* A write hit a hard stack guard: the stack is unrecoverable.
   Attempt to reset the lisp process (currently a no-op — see
   reset_lisp_process) and report failure. */
OSStatus
do_hard_stack_overflow(ExceptionInformation *xp, protected_area_ptr area, BytePtr addr)
{
#ifdef SUPPORT_PRAGMA_UNUSED
#pragma unused(area,addr)
#endif
  reset_lisp_process(xp);
  return -1;
}
---|
906 | |
---|
907 | extern area* |
---|
908 | allocate_vstack(natural useable); /* This is in "pmcl-kernel.c" */ |
---|
909 | |
---|
910 | extern area* |
---|
911 | allocate_tstack(natural useable); /* This is in "pmcl-kernel.c" */ |
---|
912 | |
---|
913 | |
---|
914 | |
---|
915 | |
---|
916 | |
---|
917 | |
---|
/* True if the stack location looks like a lisp frame, i.e. carries
   the lisp frame marker word. */
Boolean
lisp_frame_p(lisp_frame *spPtr)
{
  return (spPtr->marker == lisp_frame_marker);
}
---|
923 | |
---|
924 | |
---|
925 | int ffcall_overflow_count = 0; |
---|
926 | |
---|
927 | |
---|
928 | |
---|
929 | |
---|
930 | |
---|
931 | |
---|
932 | /* Note: CURRENT_VS (CURRENT_TS) is always either the area containing |
---|
933 | the current value of VSP (TSP) or an older area. */ |
---|
934 | |
---|
/*
  Soft overflow of the value stack: unprotect the soft guard region
  of the current thread's vs_area, then have lisp signal a
  recoverable stack-overflow condition.  `addr` (the faulting
  address) isn't needed once the area is found via the TCR.
*/
OSStatus
do_vsp_overflow (ExceptionInformation *xp, BytePtr addr)
{
  TCR* tcr = get_tcr(true);
  area *a = tcr->vs_area;
  protected_area_ptr vsp_soft = a->softprot;
  unprotect_area(vsp_soft);
  /* `vsp` names the value-stack-pointer register here. */
  signal_stack_soft_overflow(xp,vsp);
  return 0;
}
---|
945 | |
---|
946 | |
---|
947 | |
---|
/*
  Dispatch a soft guard-page hit to the appropriate per-stack
  handler: value-stack overflows go to do_vsp_overflow(); anything
  else (control stack) is handled inline by unprotecting the guard
  and signalling a recoverable overflow at Rsp.
*/
OSStatus
do_soft_stack_overflow(ExceptionInformation *xp, protected_area_ptr prot_area, BytePtr addr)
{
  /* Trying to write into a guard page on the vstack or tstack.
     Allocate a new stack segment, emulate stwu and stwux for the TSP, and
     signal an error_stack_overflow condition.
  */
  if (prot_area->why == kVSPsoftguard) {
    return do_vsp_overflow(xp,addr);
  }
  unprotect_area(prot_area);
  /* `Rsp` names the control-stack-pointer register. */
  signal_stack_soft_overflow(xp,Rsp);
  return 0;
}
---|
962 | |
---|
/*
  Handler for a write-protect fault that matched no known protected
  area: nothing to do here, just report failure (-1) so the caller
  can escalate.
*/
OSStatus
do_spurious_wp_fault(ExceptionInformation *xp, protected_area_ptr area, BytePtr addr)
{
#ifdef SUPPORT_PRAGMA_UNUSED
#pragma unused(xp,area,addr)
#endif
  return -1;
}
---|
971 | |
---|
972 | |
---|
973 | |
---|
974 | |
---|
975 | |
---|
976 | |
---|
977 | |
---|
978 | |
---|
979 | |
---|
980 | |
---|
/*
  SIGFPE handler stub on this port: floating-point exceptions are
  never claimed here, so the signal propagates to the generic
  unhandled-exception path in signal_handler().
*/
Boolean
handle_sigfpe(ExceptionInformation *xp, TCR *tcr)
{
  return false;
}
---|
986 | |
---|
987 | |
---|
/*
  Stub: no emulation of unimplemented instructions is attempted on
  this port; always reports the exception as unhandled.
*/
Boolean
handle_unimplemented_instruction(ExceptionInformation *xp,
                                 opcode instruction,
                                 TCR *tcr)
{

  return false;
}
---|
996 | |
---|
997 | Boolean |
---|
998 | handle_exception(int xnum, |
---|
999 | ExceptionInformation *xp, |
---|
1000 | TCR *tcr, |
---|
1001 | siginfo_t *info, |
---|
1002 | int old_valence) |
---|
1003 | { |
---|
1004 | pc program_counter; |
---|
1005 | opcode instruction = 0; |
---|
1006 | |
---|
1007 | if (old_valence != TCR_STATE_LISP) { |
---|
1008 | return false; |
---|
1009 | } |
---|
1010 | |
---|
1011 | program_counter = xpPC(xp); |
---|
1012 | |
---|
1013 | if ((xnum == SIGILL) | (xnum == SIGTRAP)) { |
---|
1014 | instruction = *program_counter; |
---|
1015 | } |
---|
1016 | |
---|
1017 | if (IS_ALLOC_TRAP(instruction)) { |
---|
1018 | return handle_alloc_trap(xp, tcr); |
---|
1019 | } else if ((xnum == SIGSEGV) || |
---|
1020 | (xnum == SIGBUS)) { |
---|
1021 | return handle_protection_violation(xp, info, tcr, old_valence); |
---|
1022 | } else if (xnum == SIGFPE) { |
---|
1023 | return handle_sigfpe(xp, tcr); |
---|
1024 | } else if ((xnum == SIGILL)) { |
---|
1025 | if (IS_GC_TRAP(instruction)) { |
---|
1026 | return handle_gc_trap(xp, tcr); |
---|
1027 | } else if (IS_UUO(instruction)) { |
---|
1028 | return handle_uuo(xp, info, instruction); |
---|
1029 | } else { |
---|
1030 | return handle_unimplemented_instruction(xp,instruction,tcr); |
---|
1031 | } |
---|
1032 | } else if (xnum == SIGNAL_FOR_PROCESS_INTERRUPT) { |
---|
1033 | tcr->interrupt_pending = 0; |
---|
1034 | callback_for_trap(nrs_CMAIN.vcell, xp, 0, 0, NULL); |
---|
1035 | return true; |
---|
1036 | } |
---|
1037 | |
---|
1038 | return false; |
---|
1039 | } |
---|
1040 | |
---|
/*
  Advance the saved PC by `delta` BYTES.  xpPC() is typed as an
  opcode (32-bit word) pointer, so the byte count is converted to a
  word count; delta is expected to be a multiple of 4.
*/
void
adjust_exception_pc(ExceptionInformation *xp, int delta)
{
  xpPC(xp) += (delta >> 2);
}
---|
1046 | |
---|
1047 | |
---|
1048 | /* |
---|
1049 | This wants to scan backwards until "where" points to an instruction |
---|
1050 | whose major opcode is either 63 (double-float) or 59 (single-float) |
---|
1051 | */ |
---|
1052 | |
---|
1053 | OSStatus |
---|
1054 | handle_fpux_binop(ExceptionInformation *xp, pc where) |
---|
1055 | { |
---|
1056 | OSStatus err = -1; |
---|
1057 | opcode *there = (opcode *) where, instr, errnum = 0; |
---|
1058 | return err; |
---|
1059 | } |
---|
1060 | |
---|
/*
  Decode and handle a UUO (the lisp kernel's trap encoding, raised
  via SIGILL).  `the_uuo` is the faulting instruction word.  Returns
  true when handled; on success the PC is advanced past the UUO by
  `bump` bytes unless the individual case already adjusted the PC
  and zeroed `bump`.
*/
Boolean
handle_uuo(ExceptionInformation *xp, siginfo_t *info, opcode the_uuo) 
{
  unsigned 
    format = UUO_FORMAT(the_uuo);
  Boolean handled = false;
  int bump = 4;                  /* ARM instructions are 4 bytes wide */
  TCR *tcr = get_tcr(true);
  
  switch (format) {
  case uuo_format_kernel_service:
    {
      /* Thread-management services; `target` is the TCR the service
         operates on, results (where any) are returned in imm0. */
      TCR * target = (TCR *)xpGPR(xp,arg_z);
      int service = UUO_UNARY_field(the_uuo);

      switch (service) {
      case error_propagate_suspend:
        handled = true;
        break;
      case error_interrupt:
        xpGPR(xp,imm0) = (LispObj) raise_thread_interrupt(target);
        handled = true;
        break;
      case error_suspend:
        xpGPR(xp,imm0) = (LispObj) lisp_suspend_tcr(target);
        handled = true;
        break;
      case error_suspend_all:
        lisp_suspend_other_threads();
        handled = true;
        break;
      case error_resume:
        xpGPR(xp,imm0) = (LispObj) lisp_resume_tcr(target);
        handled = true;
        break;
      case error_resume_all:
        lisp_resume_other_threads();
        handled = true;
        break;
      case error_kill:
        xpGPR(xp,imm0) = (LispObj)kill_tcr(target);
        handled = true;
        break;
      case error_allocate_list:
        allocate_list(xp,tcr);
        handled = true;
        break;
      default:
        handled = false;
        break;
      }
      break;
    }

  case uuo_format_unary:
    switch(UUO_UNARY_field(the_uuo)) {
    case 3:
      /* Grow the TCR's thread-local-binding table. */
      if (extend_tcr_tlb(tcr,xp,UUOA_field(the_uuo))) {
        handled = true;
        bump = 4;
        break;
      }
      /* fall in */
    default:
      handled = false;
      break;

    }
    break;

  case uuo_format_nullary:
    switch (UUOA_field(the_uuo)) {
    case 3:
      /* Breakpoint: step past the UUO first (bump zeroed so the PC
         isn't advanced a second time), then enter the debugger. */
      adjust_exception_pc(xp, bump);
      bump = 0;
      lisp_Debugger(xp, info, debug_entry_dbg, false, "Lisp Breakpoint");
      handled = true;
      break;

    case 4:
      /* Deferred PROCESS-INTERRUPT delivery via the CMAIN callback. */
      tcr->interrupt_pending = 0;
      callback_for_trap(nrs_CMAIN.vcell, xp, 0, 0, NULL);
      handled = true;
      break;
    default:
      handled = false;
      break;
    }
    break;


  case uuo_format_error_lisptag:
  case uuo_format_error_fulltag:
  case uuo_format_error_xtype:
  case uuo_format_cerror_lisptag:
  case uuo_format_cerror_fulltag:
  case uuo_format_cerror_xtype:
  case uuo_format_nullary_error:
  case uuo_format_unary_error:
  case uuo_format_binary_error:
  case uuo_format_ternary:
  case uuo_format_ternary2:
    /* All error-signalling UUO formats funnel through the lisp error
       callback, which may itself adjust `bump`. */
    handled = handle_error(xp,0,the_uuo, &bump);
    break;

  default:
    handled = false;
    bump = 0;
  }
  
  if (handled && bump) {
    adjust_exception_pc(xp, bump);
  }
  return handled;
}
---|
1176 | |
---|
/*
  If `lisp_function` is a lisp function object and `where` lies
  within its code vector, return that (tagged) code vector;
  otherwise return 0.  Size is the element count scaled to bytes
  (<< 2: 32-bit code words).
  NOTE(review): the range test compares an untagged lower bound with
  a tagged upper bound — presumably intentional slop inherited from
  the PPC version; confirm before tightening.
*/
natural
register_codevector_contains_pc (natural lisp_function, pc where)
{
  natural code_vector, size;

  if ((fulltag_of(lisp_function) == fulltag_misc) &&
      (header_subtag(header_of(lisp_function)) == subtag_function)) {
    code_vector = deref(lisp_function, 2);
    size = header_element_count(header_of(code_vector)) << 2;
    if ((untag(code_vector) < (natural)where) &&
        ((natural)where < (code_vector + size)))
      return(code_vector);
  }

  return(0);
}
---|
1193 | |
---|
/* Thin wrapper: deliver a trap to lisp through callback_to_lisp(). */
Boolean
callback_for_trap (LispObj callback_macptr, ExceptionInformation *xp, natural info,natural arg, int *bumpP)
{
  return callback_to_lisp(callback_macptr, xp, info,arg, bumpP);
}
---|
1199 | |
---|
/*
  Call back into lisp through `callback_macptr` (a macptr boxing a C
  function pointer installed by lisp).  The callee receives the
  exception context, two arguments, and a (register, offset) pair
  locating the exception within lisp code.  The global exception
  lock is released for the duration of the call.  Any PC adjustment
  the callback requests is stored through bumpP.  Always returns
  true.
*/
Boolean
callback_to_lisp (LispObj callback_macptr, ExceptionInformation *xp,
                  natural arg1, natural arg2, int *bumpP)
{
  natural  callback_ptr;
  area *a;
  natural fnreg = Rfn,  codevector, offset;
  pc where = xpPC(xp);
  int delta;

  /* Locate the code vector containing the PC: try the Rfn register
     first, then nfn; fnreg 0 means "not in any known function". */
  codevector = register_codevector_contains_pc(xpGPR(xp,fnreg), where);
  if (codevector == 0) {
    fnreg = nfn;
    codevector = register_codevector_contains_pc(xpGPR(xp,fnreg), where);
    if (codevector == 0) {
      fnreg = 0;
    }
  }
  if (codevector) {
    /* Offset of the PC within the (tagged) code vector's data. */
    offset = (natural)where - (codevector - (fulltag_misc-node_size));
  } else {
    offset = (natural)where;
  }
                                                 
                                               

  TCR *tcr = get_tcr(true);

  /* Put the active stack pointer where .SPcallback expects it */
  a = tcr->cs_area;
  a->active = (BytePtr) ptr_from_lispobj(xpGPR(xp, Rsp));

  /* Copy globals from the exception frame to tcr */
  tcr->save_allocptr = (void *)ptr_from_lispobj(xpGPR(xp, allocptr));
  tcr->save_vsp = (LispObj*) ptr_from_lispobj(xpGPR(xp, vsp));



  /* Call back.
     Lisp will handle trampolining through some code that
     will push lr/fn & pc/nfn stack frames for backtrace.
  */
  callback_ptr = ((macptr *)ptr_from_lispobj(untag(callback_macptr)))->address;
  UNLOCK(lisp_global(EXCEPTION_LOCK), tcr);
  delta = ((int (*)())callback_ptr) (xp, arg1, arg2, fnreg, offset);
  LOCK(lisp_global(EXCEPTION_LOCK), tcr);

  if (bumpP) {
    *bumpP = delta;
  }

  /* Copy GC registers back into exception frame */
  xpGPR(xp, allocptr) = (LispObj) ptr_to_lispobj(tcr->save_allocptr);
  return true;
}
---|
1255 | |
---|
1256 | area * |
---|
1257 | allocate_no_stack (natural size) |
---|
1258 | { |
---|
1259 | #ifdef SUPPORT_PRAGMA_UNUSED |
---|
1260 | #pragma unused(size) |
---|
1261 | #endif |
---|
1262 | |
---|
1263 | return (area *) NULL; |
---|
1264 | } |
---|
1265 | |
---|
1266 | |
---|
1267 | |
---|
1268 | |
---|
1269 | |
---|
1270 | |
---|
1271 | /* callback to (symbol-value cmain) if it is a macptr, |
---|
1272 | otherwise report cause and function name to console. |
---|
1273 | Returns noErr if exception handled OK */ |
---|
1274 | OSStatus |
---|
1275 | handle_trap(ExceptionInformation *xp, opcode the_trap, pc where, siginfo_t *info) |
---|
1276 | { |
---|
1277 | LispObj cmain = nrs_CMAIN.vcell; |
---|
1278 | TCR *tcr = TCR_FROM_TSD(xpGPR(xp, rcontext)); |
---|
1279 | |
---|
1280 | } |
---|
1281 | |
---|
1282 | |
---|
1283 | |
---|
1284 | |
---|
1285 | void non_fatal_error( char *msg ) |
---|
1286 | { |
---|
1287 | fprintf( dbgout, "Non-fatal error: %s.\n", msg ); |
---|
1288 | fflush( dbgout ); |
---|
1289 | } |
---|
1290 | |
---|
1291 | |
---|
1292 | |
---|
1293 | Boolean |
---|
1294 | handle_error(ExceptionInformation *xp, unsigned arg1, unsigned arg2, int *bumpP) |
---|
1295 | { |
---|
1296 | LispObj errdisp = nrs_ERRDISP.vcell; |
---|
1297 | |
---|
1298 | if ((fulltag_of(errdisp) == fulltag_misc) && |
---|
1299 | (header_subtag(header_of(errdisp)) == subtag_macptr)) { |
---|
1300 | /* errdisp is a macptr, we can call back to lisp */ |
---|
1301 | return callback_for_trap(errdisp, xp, arg1, arg2, bumpP); |
---|
1302 | } |
---|
1303 | |
---|
1304 | return false; |
---|
1305 | } |
---|
1306 | |
---|
1307 | |
---|
1308 | /* |
---|
1309 | Current thread has all signals masked. Before unmasking them, |
---|
1310 | make it appear that the current thread has been suspended. |
---|
1311 | (This is to handle the case where another thread is trying |
---|
1312 | to GC before this thread is able to seize the exception lock.) |
---|
1313 | */ |
---|
/*
  Record the pending exception context and move the thread into the
  EXCEPTION_WAIT state BEFORE unmasking signals, so a concurrent GC
  sees this thread as already suspended.  Returns the previous
  valence so the caller can restore it on exit.
*/
int
prepare_to_wait_for_exception_lock(TCR *tcr, ExceptionInformation *context)
{
  int old_valence = tcr->valence;

  tcr->pending_exception_context = context;
  tcr->valence = TCR_STATE_EXCEPTION_WAIT;

  /* Only now is it safe to let other signals in. */
  ALLOW_EXCEPTIONS(context);
  return old_valence;
}
---|
1325 | |
---|
/*
  Acquire the global exception lock, then push `xf` (recording the
  current exception context) onto the TCR's xframe chain and mark
  the thread "foreign" while the handler body runs.  Paired with
  unlock_exception_lock_in_handler().
*/
void
wait_for_exception_lock_in_handler(TCR *tcr,
                                   ExceptionInformation *context,
                                   xframe_list *xf)
{

  LOCK(lisp_global(EXCEPTION_LOCK), tcr);
  xf->curr = context;
  xf->prev = tcr->xframe;
  tcr->xframe =  xf;
  tcr->pending_exception_context = NULL;
  tcr->valence = TCR_STATE_FOREIGN;
}
---|
1339 | |
---|
/*
  Undo wait_for_exception_lock_in_handler(): pop the xframe chain,
  restore the pending exception context, move to EXCEPTION_RETURN
  state, and release the exception lock (in that order — the state
  must look consistent before the lock is dropped).
*/
void
unlock_exception_lock_in_handler(TCR *tcr)
{
  tcr->pending_exception_context = tcr->xframe->curr;
  tcr->xframe = tcr->xframe->prev;
  tcr->valence = TCR_STATE_EXCEPTION_RETURN;
  UNLOCK(lisp_global(EXCEPTION_LOCK),tcr);
}
---|
1348 | |
---|
1349 | /* |
---|
1350 | If an interrupt is pending on exception exit, try to ensure |
---|
1351 | that the thread sees it as soon as it's able to run. |
---|
1352 | */ |
---|
1353 | void |
---|
1354 | raise_pending_interrupt(TCR *tcr) |
---|
1355 | { |
---|
1356 | if (TCR_INTERRUPT_LEVEL(tcr) > 0) { |
---|
1357 | pthread_kill((pthread_t)ptr_from_lispobj(tcr->osid), SIGNAL_FOR_PROCESS_INTERRUPT); |
---|
1358 | } |
---|
1359 | } |
---|
1360 | |
---|
/*
  Restore the TCR state saved at handler entry and block ALL signals
  for the remainder of the handler's return path.  On Android a
  fully-set mask is built by hand (two all-ones words) because the
  rt_sigprocmask wrapper above takes a raw 8-byte mask.
*/
void
exit_signal_handler(TCR *tcr, int old_valence, natural old_last_lisp_frame)
{
#ifndef ANDROID
  sigset_t mask;
  sigfillset(&mask);
#else
  int mask [] = {-1,-1};
#endif
  
  pthread_sigmask(SIG_SETMASK,(sigset_t *)&mask, NULL);
  tcr->valence = old_valence;
  tcr->pending_exception_context = NULL;
  tcr->last_lisp_frame = old_last_lisp_frame;
}
---|
1376 | |
---|
1377 | |
---|
/*
  Common synchronous-signal handler body.  On Darwin the Mach
  exception glue supplies tcr/old_valence/old_last_lisp_frame as
  extra parameters; elsewhere they are derived here.  Dispatches to
  handle_exception(); if that declines, enters the kernel debugger
  with a diagnostic message.
*/
void
signal_handler(int signum, siginfo_t *info, ExceptionInformation  *context
#ifdef DARWIN
               , TCR *tcr, int old_valence, natural old_last_lisp_frame
#endif
)
{
  xframe_list xframe_link;
#ifndef DARWIN
    
  TCR *tcr = (TCR *) get_interrupt_tcr(false);
  
  /* The signal handler's entered with all signals (notably the
     thread_suspend signal) blocked.  Don't allow any other signals
     (notably the thread_suspend signal) to preempt us until we've
     set the TCR's xframe slot to include the current exception
     context.
  */
    
  natural  old_last_lisp_frame = tcr->last_lisp_frame;
  int old_valence;

  tcr->last_lisp_frame = xpGPR(context,Rsp);
  old_valence = prepare_to_wait_for_exception_lock(tcr, context);
#endif

  /* A suspend request arrived while signals were blocked:
     acknowledge it now by re-raising the suspend signal at
     ourselves. */
  if (tcr->flags & (1<<TCR_FLAG_BIT_PENDING_SUSPEND)) {
    CLR_TCR_FLAG(tcr, TCR_FLAG_BIT_PENDING_SUSPEND);
    pthread_kill(pthread_self(), thread_suspend_signal);
  }

  
  wait_for_exception_lock_in_handler(tcr, context, &xframe_link);
  if ((!handle_exception(signum, context, tcr, info, old_valence))) {
    char msg[512];
    snprintf(msg, sizeof(msg), "Unhandled exception %d at 0x%lx, context->regs at #x%lx", signum, xpPC(context), (natural)xpGPRvector(context));
    if (lisp_Debugger(context, info, signum, (old_valence != TCR_STATE_LISP), msg)) {
      SET_TCR_FLAG(tcr,TCR_FLAG_BIT_PROPAGATE_EXCEPTION);
    }
  }
  unlock_exception_lock_in_handler(tcr);

  /* This thread now looks like a thread that was suspended while
     executing lisp code.  If some other thread gets the exception
     lock and GCs, the context (this thread's suspend_context) will
     be updated.  (That's only of concern if it happens before we
     can return to the kernel/to the Mach exception handler).
  */
  exit_signal_handler(tcr, old_valence, old_last_lisp_frame);
  raise_pending_interrupt(tcr);
}
---|
1429 | |
---|
1430 | |
---|
/*
  SIGILL front-end: UUOs on ARM are conditional instructions.  Decode
  the UUO's condition field (bits 31:29 select which APSR flag(s) to
  test, bit 28 inverts the sense) against the saved PSR; if the
  condition is FALSE the UUO is an architectural no-op, so just step
  the PC past it and return.  Otherwise fall through to the full
  signal_handler() path.
*/
void
sigill_handler(int signum, siginfo_t *info, ExceptionInformation  *xp)
{
  pc program_counter = xpPC(xp);
  opcode instr = *program_counter;

  if (IS_UUO(instr)) {
    natural psr = xpPSR(xp);
    Boolean opcode_matched_condition = false,
      flip = ((instr & (1<<28)) != 0);   /* bit 28: invert the test */
   

    switch (instr >> 29) {
    case 0:                     /* EQ / NE */
      opcode_matched_condition = ((psr & PSR_Z_MASK) != 0);
      break;
    case 1:                     /* CS / CC */
      opcode_matched_condition = ((psr & PSR_C_MASK) != 0);
      break;
    case 2:                     /* MI / PL */
      opcode_matched_condition = ((psr & PSR_N_MASK) != 0);
      break;
    case 3:                     /* VS / VC */
      opcode_matched_condition = ((psr & PSR_V_MASK) != 0);
      break;
    case 4:                     /* HI / LS: C set and Z clear */
      opcode_matched_condition = (((psr & PSR_C_MASK) != 0) &&
                                  ((psr & PSR_Z_MASK) == 0));
      break;
    case 5:                     /* GE / LT: N == V */
      opcode_matched_condition = (((psr & PSR_N_MASK) != 0) ==
                                  ((psr & PSR_V_MASK) != 0));
      break;
    case 6:                     /* GT / LE: (N == V) and Z clear */
      opcode_matched_condition = ((((psr & PSR_N_MASK) != 0) ==
                                   ((psr & PSR_V_MASK) != 0)) &&
                                  ((psr & PSR_Z_MASK) == 0));
      break;
    case 7:                     /* AL: always, never inverted */
      opcode_matched_condition = true;
      flip = false;
      break;
    }
    if (flip) {
      opcode_matched_condition = !opcode_matched_condition;
    }
    if (!opcode_matched_condition) {
      /* Condition false: the UUO doesn't fire; skip it. */
      adjust_exception_pc(xp,4);
      return;
    }
  }
  signal_handler(signum,info,xp);
}
---|
1484 | |
---|
1485 | |
---|
1486 | #ifdef USE_SIGALTSTACK |
---|
/*
  Running on the alternate signal stack: copy the signal context and
  siginfo just below the interrupted thread's Rsp on the main stack,
  then transfer there via assembly glue so `handler` runs on the
  main stack.  The copied context's uc_stack/uc_link are cleared so
  nothing dangles into the altstack.
  NOTE(review): assumes sizeof(ucontext_t) is enough for the saved
  ExceptionInformation — confirm against the platform's layout.
*/
void
invoke_handler_on_main_stack(int signo, siginfo_t *info, ExceptionInformation *xp, void *return_address, void *handler)
{
  ExceptionInformation *xp_copy;
  siginfo_t *info_copy;
  extern void call_handler_on_main_stack(int, siginfo_t *, ExceptionInformation *,void *, void *);

  BytePtr target_sp= (BytePtr)xpGPR(xp,Rsp);
  target_sp -= sizeof(ucontext_t);
  xp_copy = (ExceptionInformation *)target_sp;
  memmove(target_sp,xp,sizeof(*xp));
  xp_copy->uc_stack.ss_sp = 0;
  xp_copy->uc_stack.ss_size = 0;
  xp_copy->uc_stack.ss_flags = 0;
  xp_copy->uc_link = NULL;
  target_sp -= sizeof(siginfo_t);
  info_copy = (siginfo_t *)target_sp;
  memmove(target_sp,info,sizeof(*info));
  call_handler_on_main_stack(signo, info_copy, xp_copy, return_address, handler);
}
---|
1507 | |
---|
/*
  First-line handler installed on the alternate signal stack.  For
  control-stack faults it repairs the guard pages itself (the main
  stack isn't usable yet): SIGBUS exactly on the soft limit remaps
  that page; SIGSEGV between low and softlimit either unprotects the
  soft-guard region or Bug()s out on a true hard overflow.  All
  other work is forwarded to signal_handler() on the main stack.
*/
void
altstack_signal_handler(int signo, siginfo_t *info, ExceptionInformation *xp)
{
  TCR *tcr=get_tcr(true);
  
  if (signo == SIGBUS) {
    BytePtr addr = (BytePtr)(xp->uc_mcontext.fault_address);
    area *a = tcr->cs_area;
    if (((BytePtr)truncate_to_power_of_2(addr,log2_page_size))== a->softlimit) 
    {
      /* Fault on the soft-limit page itself: remap it writable. */
      if (mmap(a->softlimit,
               page_size,
               PROT_READ|PROT_WRITE|PROT_EXEC,
               MAP_PRIVATE|MAP_ANON|MAP_FIXED,
               -1,
               0) == a->softlimit) {
        return;
      }
    }
  } else if (signo == SIGSEGV) {
    BytePtr addr = (BytePtr)(xp->uc_mcontext.fault_address);
    area *a = tcr->cs_area;
    
    if ((addr >= a->low) &&
        (addr < a->softlimit)) {
      if (addr < a->hardlimit) {
        /* Below the hard limit: unrecoverable. */
        Bug(xp, "hard stack overflow");
      } else {
        UnProtectMemory(a->hardlimit,a->softlimit-a->hardlimit);
      }
    }
  }
  invoke_handler_on_main_stack(signo, info, xp, __builtin_return_address(0), signal_handler);
}
---|
1542 | #endif |
---|
1543 | |
---|
1544 | /* |
---|
1545 | If it looks like we're in the middle of an atomic operation, make |
---|
1546 | it seem as if that operation is either complete or hasn't started |
---|
1547 | yet. |
---|
1548 | |
---|
1549 | The cases handled include: |
---|
1550 | |
---|
1551 | a) storing into a newly-allocated lisp frame on the stack. |
---|
1552 | b) marking a newly-allocated TSP frame as containing "raw" data. |
---|
1553 | c) consing: the GC has its own ideas about how this should be |
---|
1554 | handled, but other callers would be best advised to back |
---|
1555 | up or move forward, according to whether we're in the middle |
---|
1556 | of allocating a cons cell or allocating a uvector. |
---|
1557 | d) a STMW to the vsp |
---|
1558 | e) EGC write-barrier subprims. |
---|
1559 | */ |
---|
1560 | |
---|
1561 | extern opcode |
---|
1562 | egc_write_barrier_start, |
---|
1563 | egc_write_barrier_end, |
---|
1564 | egc_store_node_conditional, |
---|
1565 | egc_store_node_conditional_test, |
---|
1566 | egc_set_hash_key, egc_set_hash_key_did_store, |
---|
1567 | egc_gvset, egc_gvset_did_store, |
---|
1568 | egc_rplaca, egc_rplaca_did_store, |
---|
1569 | egc_rplacd, egc_rplacd_did_store, |
---|
1570 | egc_set_hash_key_conditional, |
---|
1571 | egc_set_hash_key_conditional_test, |
---|
1572 | swap_lr_lisp_frame_temp0, |
---|
1573 | swap_lr_lisp_frame_arg_z; |
---|
1574 | |
---|
1575 | |
---|
1576 | extern opcode ffcall_return_window, ffcall_return_window_end; |
---|
1577 | |
---|
/*
  "Luser" (fix up) a suspended thread's context so that no atomic
  instruction sequence is left half-done — see the commentary above
  for the cases handled.  Mutates the context in place:

    1. PC inside an EGC write-barrier subprim: either the store
       hasn't happened (just return) or it has (do the memoization
       the subprim would have done, then return to the caller via LR).
    2. Allocptr untagged (mid-allocation): either record the
       displacement for the caller (alloc_disp non-NULL, interrupt
       case) or force the thread to (re)take the alloc trap (GC case);
       if no displacement, back the PC up before the trap sequence or
       finish the allocation, depending on where the PC stopped.
    3. PC inside a swap_lr_lisp_frame subprim: complete the frame's
       savelr store and LR/imm0 swap by hand, then jump past it.

  (Local `frame` is computed but never used — left as-is.)
*/
void
pc_luser_xp(ExceptionInformation *xp, TCR *tcr, signed_natural *alloc_disp)
{
  pc program_counter = xpPC(xp);
  opcode instr = *program_counter;
  lisp_frame *frame = (lisp_frame *)ptr_from_lispobj(xpGPR(xp,Rsp));
  LispObj cur_allocptr = xpGPR(xp, allocptr);
  int allocptr_tag = fulltag_of(cur_allocptr);

  

  /* Case 1: inside the EGC write-barrier region.  Each subprim has a
     "did store" (or conditional-test) label; before it, nothing
     happened yet and we can simply return.  After it, emulate the
     memoization and return to the subprim's caller. */
  if ((program_counter < &egc_write_barrier_end) && 
      (program_counter >= &egc_write_barrier_start)) {
    LispObj *ea = 0, val = 0, root = 0;
    bitvector refbits = (bitvector)(lisp_global(REFBITS));
    Boolean need_check_memo = true, need_memoize_root = false;

    if (program_counter >= &egc_set_hash_key_conditional) {
      if ((program_counter < &egc_set_hash_key_conditional_test) ||
          ((program_counter == &egc_set_hash_key_conditional_test) &&
           (! (xpPSR(xp) & PSR_Z_MASK)))) {
        /* Conditional store not yet attempted, or it failed (Z clear). */
        return;
      }
      root = xpGPR(xp,arg_x);
      ea = (LispObj *) (root+xpGPR(xp,arg_y)+misc_data_offset);
      need_memoize_root = true;
    } else if (program_counter >= &egc_store_node_conditional) {
      if ((program_counter < &egc_store_node_conditional_test) ||
          ((program_counter == &egc_store_node_conditional_test) &&
           (! (xpPSR(xp) & PSR_Z_MASK)))) {
        /* The conditional store either hasn't been attempted yet, or
           has failed.  No need to adjust the PC, or do memoization. */
        return;
      }
      ea = (LispObj*)(xpGPR(xp,arg_x) + unbox_fixnum(xpGPR(xp,temp2)));
      xpGPR(xp,arg_z) = t_value;   /* store succeeded: result is T */
    } else if (program_counter >= &egc_set_hash_key) {
      if (program_counter < &egc_set_hash_key_did_store) {
        return;
      }
      root = xpGPR(xp,arg_x);
      val = xpGPR(xp,arg_z);
      ea = (LispObj *) (root+unbox_fixnum(xpGPR(xp,temp2)));
      need_memoize_root = true;
    } else if (program_counter >= &egc_gvset) {
      if (program_counter < &egc_gvset_did_store) {
        return;
      } 
      ea = (LispObj *) (xpGPR(xp,arg_x)+xpGPR(xp,arg_y)+misc_data_offset);
      val = xpGPR(xp,arg_z);
    } else if (program_counter >= &egc_rplacd) {
      if (program_counter < &egc_rplacd_did_store) {
        return;
      } 
      ea = (LispObj *) untag(xpGPR(xp,arg_y));
      val = xpGPR(xp,arg_z);
    } else {                      /* egc_rplaca */
      if (program_counter < &egc_rplaca_did_store) {
        return;
      } 
      ea =  ((LispObj *) untag(xpGPR(xp,arg_y)))+1;
      val = xpGPR(xp,arg_z);
    }
    if (need_check_memo) {
      /* Record an old->young reference in the refbits (and the
         coarser global_refidx), and likewise for the hash-table
         root when required. */
      natural  bitnumber = area_dnode(ea, lisp_global(REF_BASE));
      if ((bitnumber < lisp_global(OLDSPACE_DNODE_COUNT)) &&
          ((LispObj)ea < val)) {
        atomic_set_bit(refbits, bitnumber);
        atomic_set_bit(global_refidx, bitnumber>>8);
        if (need_memoize_root) {
          bitnumber = area_dnode(root, lisp_global(REF_BASE));
          atomic_set_bit(refbits, bitnumber);
          atomic_set_bit(global_refidx,bitnumber>>8);
        }
      }
    }
    /* The subprim's work is done: return to its caller. */
    xpPC(xp) = xpLR(xp);
    return;
  }


  /* Case 2: allocptr is tagged, i.e. the thread is somewhere in an
     allocation sequence. */
  if (allocptr_tag != tag_fixnum) {
    signed_natural disp = allocptr_displacement(xp);

    if (disp) {
      /* Being architecturally "at" the alloc trap doesn't tell
         us much (in particular, it doesn't tell us whether
         or not the thread has committed to taking the trap
         and is waiting for the exception lock (or waiting
         for the Mach exception thread to tell it how bad
         things are) or is about to execute a conditional
         trap.
         Regardless of which case applies, we want the
         other thread to take (or finish taking) the
         trap, and we don't want it to consider its
         current allocptr to be valid.
         The difference between this case (suspend other
         thread for GC) and the previous case (suspend
         current thread for interrupt) is solely a
         matter of what happens after we leave this
         function: some non-current thread will stay
         suspended until the GC finishes, then take
         (or start processing) the alloc trap.   The
         current thread will go off and do PROCESS-INTERRUPT
         or something, and may return from the interrupt
         and need to finish the allocation that got interrupted.
      */

      if (alloc_disp) {
        *alloc_disp = disp;
        xpGPR(xp,allocptr) -= disp;
        /* Leave the PC at the alloc trap.  When the interrupt
           handler returns, it'll decrement allocptr by disp
           and the trap may or may not be taken.
        */
      } else {
        Boolean ok = false;
        update_bytes_allocated(tcr, (void *) ptr_from_lispobj(cur_allocptr - disp));
        xpGPR(xp, allocptr) = VOID_ALLOCPTR + disp;
        instr = program_counter[-1];
        if (IS_BRANCH_AROUND_ALLOC_TRAP(instr)) {
          instr = program_counter[-2];
          if (IS_COMPARE_ALLOCPTR_TO_RM(instr)){
            xpGPR(xp,RM_field(instr)) = VOID_ALLOCPTR;
            ok = true;
          }
        }
        if (ok) {
          /* Clear the carry bit, so that the trap will be taken. */
          xpPSR(xp) &= ~PSR_C_MASK;
        } else {
          Bug(NULL, "unexpected instruction preceding alloc trap.");
        }
      }
    } else {
      /* we may be before or after the alloc trap.  If before, set
         allocptr to VOID_ALLOCPTR and back up to the start of the
         instruction sequence; if after, finish the allocation. */
      Boolean before_alloc_trap = false;

      /* Walk backwards through the known instruction sequence that
         precedes the trap; any match means we're still before it. */
      if (IS_BRANCH_AROUND_ALLOC_TRAP(instr)) {
        before_alloc_trap = true;
        --program_counter;
        instr = *program_counter;
      }
      if (IS_COMPARE_ALLOCPTR_TO_RM(instr)) {
        before_alloc_trap = true;
        --program_counter;
        instr = *program_counter;
      }
      if (IS_LOAD_RD_FROM_ALLOCBASE(instr)) {
        before_alloc_trap = true;
        --program_counter;
        instr = *program_counter;
      }
      if (IS_SUB_HI_FROM_ALLOCPTR(instr)) {
        before_alloc_trap = true;
        --program_counter;
      }
      if (before_alloc_trap) {
        xpPC(xp) = program_counter;
        xpGPR(xp,allocptr) = VOID_ALLOCPTR;
      } else {
        /* If we're already past the alloc_trap, finish allocating
           the object. */
        if (allocptr_tag == fulltag_cons) {
          finish_allocating_cons(xp);
        } else {
          if (allocptr_tag == fulltag_misc) {
            finish_allocating_uvector(xp);
          } else {
            Bug(xp, "what's being allocated here ?");
          }
        }
        /* Whatever we finished allocating, reset allocptr/allocbase to
           VOID_ALLOCPTR */
        xpGPR(xp,allocptr) = VOID_ALLOCPTR;
      }
      return;
    }
    return;
  }

  /* Case 3: inside one of the 3-instruction swap_lr_lisp_frame
     subprims — finish its effect by hand and skip past it. */
  {
    lisp_frame *swap_frame = NULL;
    pc base = &swap_lr_lisp_frame_temp0;
    
    if ((program_counter >base)             /* sic */
        && (program_counter < (base+3))) {
      swap_frame = (lisp_frame *)xpGPR(xp,temp0);
    } else {
      base = &swap_lr_lisp_frame_arg_z;
      if ((program_counter > base) && (program_counter < (base+3))) { 
        swap_frame = (lisp_frame *)xpGPR(xp,arg_z);
      }
    }
    if (swap_frame) {
      if (program_counter == (base+1)) {
        /* The savelr store hadn't executed yet: do it now. */
        swap_frame->savelr = xpGPR(xp,Rlr);
      }
      xpGPR(xp,Rlr) = xpGPR(xp,imm0);
      xpPC(xp) = base+3;
      return;
    }
  }
}
---|
1784 | |
---|
/*
  Signal handler for SIGNAL_FOR_PROCESS_INTERRUPT.  If the thread's
  interrupt level says interrupts are disabled, or the thread is not
  in a state where it can safely take one, record a pending interrupt
  in the TCR; otherwise acquire the exception lock and dispatch the
  interrupt through handle_exception().
*/
void
interrupt_handler (int signum, siginfo_t *info, ExceptionInformation *context)
{
  TCR *tcr = get_interrupt_tcr(false);
  if (tcr) {
    if (TCR_INTERRUPT_LEVEL(tcr) < 0) {
      /* Interrupts disabled for this thread: just mark one pending
         (stored as a fixnum). */
      tcr->interrupt_pending = 1 << fixnumshift;
    } else {
      LispObj cmain = nrs_CMAIN.vcell;

      /* Only deliverable if the lisp-side callback (%CMAIN%) is bound
         to a macptr. */
      if ((fulltag_of(cmain) == fulltag_misc) &&
          (header_subtag(header_of(cmain)) == subtag_macptr)) {
        /*
          This thread can (allegedly) take an interrupt now.
          It's tricky to do that if we're executing
          foreign code (especially Linuxthreads code, much
          of which isn't reentrant.)
          If we're unwinding the stack, we also want to defer
          the interrupt.
        */
        if ((tcr->valence != TCR_STATE_LISP) ||
            (tcr->unwinding != 0)) {
          /* Defer: not in lisp code, or mid-unwind. */
          tcr->interrupt_pending = 1 << fixnumshift;
        } else {
          xframe_list xframe_link;
          int old_valence;
          signed_natural disp=0;
          natural old_last_lisp_frame = tcr->last_lisp_frame;

          tcr->last_lisp_frame = xpGPR(context,Rsp);
          /* Nudge the interrupted context out of any uninterruptible
             instruction sequence; disp receives any allocptr
             displacement that must be re-applied afterwards. */
          pc_luser_xp(context, tcr, &disp);
          old_valence = prepare_to_wait_for_exception_lock(tcr, context);
          wait_for_exception_lock_in_handler(tcr, context, &xframe_link);
          handle_exception(signum, context, tcr, info, old_valence);
          if (disp) {
            /* Restore the allocation displacement pc_luser_xp backed out. */
            xpGPR(context,allocptr) -= disp;
          }
          unlock_exception_lock_in_handler(tcr);
          exit_signal_handler(tcr, old_valence, old_last_lisp_frame);
        }
      }
    }
  }
#ifdef DARWIN
  DarwinSigReturn(context);
#endif
}
---|
1832 | |
---|
1833 | #ifdef USE_SIGALTSTACK |
---|
/* Trampoline: run interrupt_handler on the main stack rather than on
   the signal alternate stack. */
void
altstack_interrupt_handler(int signum, siginfo_t *info, ExceptionInformation *context)
{
  invoke_handler_on_main_stack(signum, info, context, __builtin_return_address(0),interrupt_handler);
}
---|
1839 | #endif |
---|
1840 | |
---|
1841 | |
---|
1842 | void |
---|
1843 | install_signal_handler(int signo, void *handler, unsigned flags) |
---|
1844 | { |
---|
1845 | struct sigaction sa; |
---|
1846 | int err; |
---|
1847 | |
---|
1848 | sigfillset(&sa.sa_mask); |
---|
1849 | |
---|
1850 | sa.sa_sigaction = (void *)handler; |
---|
1851 | sigfillset(&sa.sa_mask); |
---|
1852 | sa.sa_flags = SA_SIGINFO; |
---|
1853 | |
---|
1854 | #ifdef ANDROID |
---|
1855 | sa.sa_flags |= SA_NODEFER; |
---|
1856 | #endif |
---|
1857 | #ifdef USE_SIGALTSTACK |
---|
1858 | if (flags & ON_ALTSTACK) |
---|
1859 | sa.sa_flags |= SA_ONSTACK; |
---|
1860 | #endif |
---|
1861 | if (flags & RESTART_SYSCALLS) |
---|
1862 | sa.sa_flags |= SA_RESTART; |
---|
1863 | if (flags & RESERVE_FOR_LISP) { |
---|
1864 | extern sigset_t user_signals_reserved; |
---|
1865 | sigaddset(&user_signals_reserved, signo); |
---|
1866 | } |
---|
1867 | |
---|
1868 | err = sigaction(signo, &sa, NULL); |
---|
1869 | if (err) { |
---|
1870 | perror("sigaction"); |
---|
1871 | exit(1); |
---|
1872 | } |
---|
1873 | } |
---|
1874 | |
---|
1875 | |
---|
/*
  Install the kernel's core exception handlers: SIGILL for UUOs,
  SIGSEGV/SIGBUS (on the alternate stack) for memory faults, and the
  process-interrupt signal.  SIGPIPE is ignored so that writes to
  closed pipes/sockets report errors via errno instead of killing
  the process.
*/
void
install_pmcl_exception_handlers()
{
  install_signal_handler(SIGILL, (void *)sigill_handler, RESERVE_FOR_LISP);
  install_signal_handler(SIGSEGV, (void *)ALTSTACK(signal_handler),
                         RESERVE_FOR_LISP|ON_ALTSTACK);
  install_signal_handler(SIGBUS, (void *)ALTSTACK(signal_handler),
                         RESERVE_FOR_LISP|ON_ALTSTACK);
  install_signal_handler(SIGNAL_FOR_PROCESS_INTERRUPT,
                         (void *)interrupt_handler, RESERVE_FOR_LISP);
  signal(SIGPIPE, SIG_IGN);
}
---|
1888 | |
---|
1889 | |
---|
1890 | #ifdef USE_SIGALTSTACK |
---|
1891 | void |
---|
1892 | setup_sigaltstack(area *a) |
---|
1893 | { |
---|
1894 | stack_t stack; |
---|
1895 | #if 0 |
---|
1896 | stack.ss_sp = a->low; |
---|
1897 | a->low += SIGSTKSZ*8; |
---|
1898 | #endif |
---|
1899 | stack.ss_size = SIGSTKSZ*8; |
---|
1900 | stack.ss_flags = 0; |
---|
1901 | stack.ss_sp = mmap(NULL,stack.ss_size, PROT_READ|PROT_WRITE|PROT_EXEC,MAP_ANON|MAP_PRIVATE,-1,0); |
---|
1902 | if (sigaltstack(&stack, NULL) != 0) { |
---|
1903 | perror("sigaltstack"); |
---|
1904 | exit(-1); |
---|
1905 | } |
---|
1906 | } |
---|
1907 | #endif |
---|
1908 | |
---|
1909 | void |
---|
1910 | thread_kill_handler(int signum, siginfo_t info, ExceptionInformation *xp) |
---|
1911 | { |
---|
1912 | TCR *tcr = get_tcr(false); |
---|
1913 | area *a; |
---|
1914 | #ifndef ANDROID |
---|
1915 | sigset_t mask; |
---|
1916 | |
---|
1917 | sigemptyset(&mask); |
---|
1918 | #else |
---|
1919 | int mask[] = {0,0}; |
---|
1920 | #endif |
---|
1921 | |
---|
1922 | if (tcr) { |
---|
1923 | tcr->valence = TCR_STATE_FOREIGN; |
---|
1924 | a = tcr->vs_area; |
---|
1925 | if (a) { |
---|
1926 | a->active = a->high; |
---|
1927 | } |
---|
1928 | a = tcr->cs_area; |
---|
1929 | if (a) { |
---|
1930 | a->active = a->high; |
---|
1931 | } |
---|
1932 | } |
---|
1933 | |
---|
1934 | pthread_sigmask(SIG_SETMASK,(sigset_t *)&mask,NULL); |
---|
1935 | pthread_exit(NULL); |
---|
1936 | } |
---|
1937 | |
---|
1938 | #ifdef USE_SIGALTSTACK |
---|
/* Trampoline: run thread_kill_handler on the main stack rather than
   on the signal alternate stack. */
void
altstack_thread_kill_handler(int signo, siginfo_t *info, ExceptionInformation *xp)
{
  invoke_handler_on_main_stack(signo, info, xp, __builtin_return_address(0), thread_kill_handler);
}
---|
1944 | #endif |
---|
1945 | |
---|
/*
  Choose the signals used for thread suspension and thread
  termination and install their handlers.  Suspension restarts
  interrupted syscalls so a suspend/resume cycle is transparent to
  foreign code.
*/
void
thread_signal_setup()
{
  thread_suspend_signal = SIG_SUSPEND_THREAD;
  thread_kill_signal = SIG_KILL_THREAD;

  install_signal_handler(thread_suspend_signal, (void *)suspend_resume_handler,
                         RESERVE_FOR_LISP|RESTART_SYSCALLS);
  install_signal_handler(thread_kill_signal, (void *)thread_kill_handler,
                         RESERVE_FOR_LISP);
}
---|
1957 | |
---|
1958 | |
---|
1959 | |
---|
1960 | void |
---|
1961 | unprotect_all_areas() |
---|
1962 | { |
---|
1963 | protected_area_ptr p; |
---|
1964 | |
---|
1965 | for(p = AllProtectedAreas, AllProtectedAreas = NULL; p; p = p->next) { |
---|
1966 | unprotect_area(p); |
---|
1967 | } |
---|
1968 | } |
---|
1969 | |
---|
1970 | /* |
---|
1971 | A binding subprim has just done "twlle limit_regno,idx_regno" and |
---|
1972 | the trap's been taken. Extend the tcr's tlb so that the index will |
---|
1973 | be in bounds and the new limit will be on a page boundary, filling |
---|
1974 | in the new page(s) with 'no_thread_local_binding_marker'. Update |
---|
1975 | the tcr fields and the registers in the xp and return true if this |
---|
1976 | all works, false otherwise. |
---|
1977 | |
---|
1978 | Note that the tlb was allocated via malloc, so realloc can do some |
---|
1979 | of the hard work. |
---|
1980 | */ |
---|
1981 | Boolean |
---|
1982 | extend_tcr_tlb(TCR *tcr, |
---|
1983 | ExceptionInformation *xp, |
---|
1984 | unsigned idx_regno) |
---|
1985 | { |
---|
1986 | unsigned |
---|
1987 | index = (unsigned) (xpGPR(xp,idx_regno)), |
---|
1988 | old_limit = tcr->tlb_limit, |
---|
1989 | new_limit = align_to_power_of_2(index+1,12), |
---|
1990 | new_bytes = new_limit-old_limit; |
---|
1991 | LispObj |
---|
1992 | *old_tlb = tcr->tlb_pointer, |
---|
1993 | *new_tlb = realloc(old_tlb, new_limit), |
---|
1994 | *work; |
---|
1995 | |
---|
1996 | if (new_tlb == NULL) { |
---|
1997 | return false; |
---|
1998 | } |
---|
1999 | |
---|
2000 | work = (LispObj *) ((BytePtr)new_tlb+old_limit); |
---|
2001 | |
---|
2002 | while (new_bytes) { |
---|
2003 | *work++ = no_thread_local_binding_marker; |
---|
2004 | new_bytes -= sizeof(LispObj); |
---|
2005 | } |
---|
2006 | tcr->tlb_pointer = new_tlb; |
---|
2007 | tcr->tlb_limit = new_limit; |
---|
2008 | return true; |
---|
2009 | } |
---|
2010 | |
---|
2011 | |
---|
2012 | |
---|
/* One-time exception-system initialization: install the kernel's
   signal handlers. */
void
exception_init()
{
  install_pmcl_exception_handlers();
}
---|
2018 | |
---|
2019 | |
---|
2020 | |
---|
2021 | |
---|
2022 | |
---|
2023 | |
---|
2024 | |
---|
2025 | void |
---|
2026 | early_signal_handler(int signum, siginfo_t *info, ExceptionInformation *context) |
---|
2027 | { |
---|
2028 | extern pc feature_check_fpu,feature_check_ldrex,feature_check_clrex; |
---|
2029 | extern int arm_architecture_version; |
---|
2030 | |
---|
2031 | if ((xpPC(context) == feature_check_fpu) || |
---|
2032 | (xpPC(context) == feature_check_ldrex)) { |
---|
2033 | arm_architecture_version = 5; |
---|
2034 | } else { |
---|
2035 | arm_architecture_version = 6; |
---|
2036 | } |
---|
2037 | xpPC(context) = xpLR(context); |
---|
2038 | } |
---|