@@ -312,107 +312,124 @@ code_t caml_next_frame_pointer(value* stack_high, value ** sp,
   return NULL;
 }
 
-/* Stores upto [max_frames_value] frames of the current call stack to
-   return to the user. This is used not in an exception-raising context, but
-   only when the user requests to save the trace (hopefully less often).
-   Instead of using a bounded buffer as [Caml_state->stash_backtrace], we first
-   traverse the stack to compute the right size, then allocate space for the
-   trace. */
-
-static void get_callstack (value* sp, intnat trap_spoff,
-                           struct stack_info* stack,
-                           intnat max_frames,
-                           code_t** trace, intnat* trace_size)
+/* minimum size to allocate a backtrace (in slots) */
+#define MIN_BACKTRACE_SIZE 16
+
+/* Stores up to [max_slots] backtrace slots of the current call stack
+   to return to the user in [*backtrace_p] (with the allocated size in
+   [*alloc_size_p]). Returns the number of frames stored. Instead of
+   using a bounded buffer as [Caml_state->stash_backtrace], we
+   dynamically grow the allocated space as required. */
+
+static size_t get_callstack (value* sp, intnat trap_spoff,
+                             struct stack_info* stack,
+                             intnat max_slots,
+                             backtrace_slot** backtrace_p,
+                             size_t* alloc_size_p)
 {
+  backtrace_slot* backtrace = *backtrace_p;
+  size_t alloc_size = *alloc_size_p;
   struct stack_info* parent = Stack_parent(stack);
   value* stack_high = Stack_high(stack);
-  value* saved_sp = sp;
-  intnat saved_trap_spoff = trap_spoff;
-
   CAMLnoalloc;
 
-  /* first compute the size of the trace */
-  {
-    *trace_size = 0;
-    while (*trace_size < max_frames) {
-      code_t p = caml_next_frame_pointer(stack_high, &sp, &trap_spoff);
-      if (p == NULL) {
-        if (parent == NULL) break;
-        sp = parent->sp;
-        trap_spoff = Long_val(sp[0]);
-        stack_high = Stack_high(parent);
-        parent = Stack_parent(parent);
-      } else {
-        ++*trace_size;
+  size_t slots = 0;
+  while (slots < max_slots) {
+    code_t p = caml_next_frame_pointer(stack_high, &sp, &trap_spoff);
+    if (!p) {
+      if (!parent) break;
+      sp = parent->sp;
+      trap_spoff = Long_val(sp[0]);
+      stack_high = Stack_high(parent);
+      parent = Stack_parent(parent);
+    } else {
+      if (slots == alloc_size) {
+        size_t new_size = alloc_size ? alloc_size * 2 : MIN_BACKTRACE_SIZE;
+        backtrace = caml_stat_resize_noexc(backtrace,
+                                           sizeof(backtrace_slot) * new_size);
+
+        if (!backtrace) { /* allocation failed */
+          *backtrace_p = NULL;
+          *alloc_size_p = 0;
+          return 0;
+        }
+        alloc_size = new_size;
       }
-    }
-  }
-
-  *trace = caml_stat_alloc(sizeof(code_t*) * *trace_size);
-
-  sp = saved_sp;
-  parent = Stack_parent(stack);
-  stack_high = Stack_high(stack);
-  trap_spoff = saved_trap_spoff;
 
-  /* then collect the trace */
-  {
-    uintnat trace_pos = 0;
-
-    while (trace_pos < *trace_size) {
-      code_t p = caml_next_frame_pointer(stack_high, &sp, &trap_spoff);
-      if (p == NULL) {
-        sp = parent->sp;
-        trap_spoff = Long_val(sp[0]);
-        stack_high = Stack_high(parent);
-        parent = Stack_parent(parent);
-      } else {
-        (*trace)[trace_pos] = p;
-        ++trace_pos;
-      }
+      backtrace[slots++] = p;
     }
   }
+  *backtrace_p = backtrace;
+  *alloc_size_p = alloc_size;
+  return slots;
 }
 
-static value alloc_callstack (code_t* trace, intnat trace_len)
+static value alloc_callstack (backtrace_slot* trace, size_t slots)
 {
   CAMLparam0();
   CAMLlocal1(callstack);
   int i;
-  callstack = caml_alloc(trace_len, 0);
-  for (i = 0; i < trace_len; i++)
+  callstack = caml_alloc(slots, 0);
+  for (i = 0; i < slots; i++)
     Store_field(callstack, i, Val_backtrace_slot(trace[i]));
   caml_stat_free(trace);
   CAMLreturn(callstack);
 }
 
+/* Obtain up to [max_slots] of the callstack of the current domain,
+ * including parent fibers. The callstack is written into [*buffer_p],
+ * current size [*alloc_size_p], which should be reallocated (on the C
+ * heap) if required. Returns the number of slots obtained.
+ *
+ * [alloc_idx] is ignored, and must be negative (this interface is
+ * also used by the native-code runtime, in which [alloc_idx] is
+ * meaningful).
+ */
+
+size_t caml_get_callstack (size_t max_slots,
+                           backtrace_slot** buffer_p,
+                           size_t* alloc_size_p,
+                           ssize_t alloc_idx)
+{
+  CAMLassert(alloc_idx < 1); /* allocation indexes not used in bytecode */
+  return get_callstack(Caml_state->current_stack->sp,
+                       Caml_state->trap_sp_off,
+                       Caml_state->current_stack,
+                       max_slots,
+                       buffer_p, alloc_size_p);
+}
+
 CAMLprim value caml_get_current_callstack (value max_frames_value)
 {
-  code_t* trace;
-  intnat trace_len;
-  get_callstack(Caml_state->current_stack->sp, Caml_state->trap_sp_off,
-                Caml_state->current_stack, Long_val(max_frames_value),
-                &trace, &trace_len);
-  return alloc_callstack(trace, trace_len);
+  backtrace_slot* backtrace = NULL;
+  size_t trace_size = 0;
+  size_t slots = get_callstack(Caml_state->current_stack->sp,
+                               Caml_state->trap_sp_off,
+                               Caml_state->current_stack,
+                               Long_val(max_frames_value),
+                               &backtrace, &trace_size);
+  return alloc_callstack(backtrace, slots);
 }
 
 CAMLprim value caml_get_continuation_callstack (value cont, value max_frames)
 {
-  code_t* trace;
-  intnat trace_len;
+  backtrace_slot* backtrace = NULL;
+  size_t trace_size = 0;
+  size_t slots;
   struct stack_info* stack;
   value* sp;
 
   stack = Ptr_val(caml_continuation_use(cont));
   {
     CAMLnoalloc; /* GC must not see the stack outside the cont */
     sp = stack->sp;
-    get_callstack(sp, Long_val(sp[0]), stack, Long_val(max_frames),
-                  &trace, &trace_len);
+    slots = get_callstack(sp, Long_val(sp[0]),
+                          stack, Long_val(max_frames),
+                          &backtrace, &trace_size);
     caml_continuation_replace(cont, stack);
   }
 
-  return alloc_callstack(trace, trace_len);
+  return alloc_callstack(backtrace, slots);
 }
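
For reference, a minimal usage sketch of the growable-buffer protocol this patch introduces: the caller passes in a buffer pointer (possibly NULL) together with its current allocated size, get_callstack/caml_get_callstack grows the buffer on the C heap with caml_stat_resize_noexc as needed, and the caller remains responsible for freeing it. This sketch is not part of the patch; the helper name, variable names, slot limit, and include paths below are assumptions made for illustration only.

#include "caml/memory.h"          /* caml_stat_free (assumed header) */
#include "caml/backtrace_prim.h"  /* backtrace_slot, caml_get_callstack (assumed header) */

/* Hypothetical caller, for illustration only. */
static size_t capture_current_callstack_example(void)
{
  backtrace_slot *buf = NULL;   /* grown on demand by get_callstack */
  size_t buf_size = 0;          /* current allocated size, in slots */
  size_t n;

  /* alloc_idx must be negative in bytecode, per the comment in the patch */
  n = caml_get_callstack(128, &buf, &buf_size, -1);

  /* buf[0..n-1] now hold the captured slots; on allocation failure,
     get_callstack sets buf to NULL and returns 0 */

  caml_stat_free(buf);          /* the caller owns the C-heap buffer */
  return n;
}

The in/out buffer plus the doubling from MIN_BACKTRACE_SIZE lets a caller reuse one buffer across repeated captures, so reallocation cost is amortized; presumably this is aimed at the native-code runtime, which, per the comment, shares this interface and passes a meaningful [alloc_idx].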