Merge pull request #8713 from kayceesrk/r14-globals

Move C global variables to a dedicated structure
Merged into master by Stephen Dolan on 2019-08-27 13:30:22 +01:00 (committed via GitHub)
commit 5ad64306d3
102 changed files with 4437 additions and 3431 deletions

.depend (21 changed lines)

@ -53,6 +53,11 @@ utils/consistbl.cmx : \
utils/consistbl.cmi utils/consistbl.cmi
utils/consistbl.cmi : \ utils/consistbl.cmi : \
utils/misc.cmi utils/misc.cmi
utils/domainstate.cmo : \
utils/domainstate.cmi
utils/domainstate.cmx : \
utils/domainstate.cmi
utils/domainstate.cmi :
utils/identifiable.cmo : \ utils/identifiable.cmo : \
utils/misc.cmi \ utils/misc.cmi \
utils/identifiable.cmi utils/identifiable.cmi
@ -2273,7 +2278,9 @@ asmcomp/emit.cmo : \
utils/misc.cmi \ utils/misc.cmi \
asmcomp/mach.cmi \ asmcomp/mach.cmi \
asmcomp/linearize.cmi \ asmcomp/linearize.cmi \
lambda/lambda.cmi \
asmcomp/emitaux.cmi \ asmcomp/emitaux.cmi \
utils/domainstate.cmi \
lambda/debuginfo.cmi \ lambda/debuginfo.cmi \
utils/config.cmi \ utils/config.cmi \
middle_end/compilenv.cmi \ middle_end/compilenv.cmi \
@ -2293,7 +2300,9 @@ asmcomp/emit.cmx : \
utils/misc.cmx \ utils/misc.cmx \
asmcomp/mach.cmx \ asmcomp/mach.cmx \
asmcomp/linearize.cmx \ asmcomp/linearize.cmx \
lambda/lambda.cmx \
asmcomp/emitaux.cmx \ asmcomp/emitaux.cmx \
utils/domainstate.cmx \
lambda/debuginfo.cmx \ lambda/debuginfo.cmx \
utils/config.cmx \ utils/config.cmx \
middle_end/compilenv.cmx \ middle_end/compilenv.cmx \
@ -2353,6 +2362,7 @@ asmcomp/linearize.cmo : \
asmcomp/proc.cmi \ asmcomp/proc.cmi \
utils/misc.cmi \ utils/misc.cmi \
asmcomp/mach.cmi \ asmcomp/mach.cmi \
lambda/lambda.cmi \
lambda/debuginfo.cmi \ lambda/debuginfo.cmi \
utils/config.cmi \ utils/config.cmi \
asmcomp/cmm.cmi \ asmcomp/cmm.cmi \
@ -2362,6 +2372,7 @@ asmcomp/linearize.cmx : \
asmcomp/proc.cmx \ asmcomp/proc.cmx \
utils/misc.cmx \ utils/misc.cmx \
asmcomp/mach.cmx \ asmcomp/mach.cmx \
lambda/lambda.cmx \
lambda/debuginfo.cmx \ lambda/debuginfo.cmx \
utils/config.cmx \ utils/config.cmx \
asmcomp/cmm.cmx \ asmcomp/cmm.cmx \
@ -2369,6 +2380,7 @@ asmcomp/linearize.cmx : \
asmcomp/linearize.cmi : \ asmcomp/linearize.cmi : \
asmcomp/reg.cmi \ asmcomp/reg.cmi \
asmcomp/mach.cmi \ asmcomp/mach.cmi \
lambda/lambda.cmi \
lambda/debuginfo.cmi \ lambda/debuginfo.cmi \
asmcomp/cmm.cmi asmcomp/cmm.cmi
asmcomp/linscan.cmo : \ asmcomp/linscan.cmo : \
@ -2406,6 +2418,7 @@ asmcomp/mach.cmo : \
asmcomp/debug/reg_with_debug_info.cmi \ asmcomp/debug/reg_with_debug_info.cmi \
asmcomp/debug/reg_availability_set.cmi \ asmcomp/debug/reg_availability_set.cmi \
asmcomp/reg.cmi \ asmcomp/reg.cmi \
lambda/lambda.cmi \
lambda/debuginfo.cmi \ lambda/debuginfo.cmi \
asmcomp/cmm.cmi \ asmcomp/cmm.cmi \
middle_end/backend_var.cmi \ middle_end/backend_var.cmi \
@ -2415,6 +2428,7 @@ asmcomp/mach.cmx : \
asmcomp/debug/reg_with_debug_info.cmx \ asmcomp/debug/reg_with_debug_info.cmx \
asmcomp/debug/reg_availability_set.cmx \ asmcomp/debug/reg_availability_set.cmx \
asmcomp/reg.cmx \ asmcomp/reg.cmx \
lambda/lambda.cmx \
lambda/debuginfo.cmx \ lambda/debuginfo.cmx \
asmcomp/cmm.cmx \ asmcomp/cmm.cmx \
middle_end/backend_var.cmx \ middle_end/backend_var.cmx \
@ -2423,6 +2437,7 @@ asmcomp/mach.cmx : \
asmcomp/mach.cmi : \ asmcomp/mach.cmi : \
asmcomp/debug/reg_availability_set.cmi \ asmcomp/debug/reg_availability_set.cmi \
asmcomp/reg.cmi \ asmcomp/reg.cmi \
lambda/lambda.cmi \
lambda/debuginfo.cmi \ lambda/debuginfo.cmi \
asmcomp/cmm.cmi \ asmcomp/cmm.cmi \
middle_end/backend_var.cmi \ middle_end/backend_var.cmi \
@ -2448,16 +2463,16 @@ asmcomp/printcmm.cmi : \
asmcomp/cmm.cmi asmcomp/cmm.cmi
asmcomp/printlinear.cmo : \ asmcomp/printlinear.cmo : \
asmcomp/printmach.cmi \ asmcomp/printmach.cmi \
asmcomp/printcmm.cmi \
asmcomp/mach.cmi \ asmcomp/mach.cmi \
asmcomp/linearize.cmi \ asmcomp/linearize.cmi \
lambda/lambda.cmi \
lambda/debuginfo.cmi \ lambda/debuginfo.cmi \
asmcomp/printlinear.cmi asmcomp/printlinear.cmi
asmcomp/printlinear.cmx : \ asmcomp/printlinear.cmx : \
asmcomp/printmach.cmx \ asmcomp/printmach.cmx \
asmcomp/printcmm.cmx \
asmcomp/mach.cmx \ asmcomp/mach.cmx \
asmcomp/linearize.cmx \ asmcomp/linearize.cmx \
lambda/lambda.cmx \
lambda/debuginfo.cmx \ lambda/debuginfo.cmx \
asmcomp/printlinear.cmi asmcomp/printlinear.cmi
asmcomp/printlinear.cmi : \ asmcomp/printlinear.cmi : \
@ -2468,6 +2483,7 @@ asmcomp/printmach.cmo : \
asmcomp/proc.cmi \ asmcomp/proc.cmi \
asmcomp/printcmm.cmi \ asmcomp/printcmm.cmi \
asmcomp/mach.cmi \ asmcomp/mach.cmi \
lambda/lambda.cmi \
asmcomp/interval.cmi \ asmcomp/interval.cmi \
lambda/debuginfo.cmi \ lambda/debuginfo.cmi \
utils/config.cmi \ utils/config.cmi \
@ -2482,6 +2498,7 @@ asmcomp/printmach.cmx : \
asmcomp/proc.cmx \ asmcomp/proc.cmx \
asmcomp/printcmm.cmx \ asmcomp/printcmm.cmx \
asmcomp/mach.cmx \ asmcomp/mach.cmx \
lambda/lambda.cmx \
asmcomp/interval.cmx \ asmcomp/interval.cmx \
lambda/debuginfo.cmx \ lambda/debuginfo.cmx \
utils/config.cmx \ utils/config.cmx \

.gitignore (vendored, 4 changed lines)

@ -189,6 +189,8 @@ _build
/runtime/.gdb_history /runtime/.gdb_history
/runtime/*.d.c /runtime/*.d.c
/runtime/*.pic.c /runtime/*.pic.c
/runtime/domain_state32.inc
/runtime/domain_state64.inc
/stdlib/camlheader /stdlib/camlheader
/stdlib/target_camlheader /stdlib/target_camlheader
@ -257,6 +259,8 @@ _build
/tools/caml-tex /tools/caml-tex
/utils/config.ml /utils/config.ml
/utils/domainstate.ml
/utils/domainstate.mli
/yacc/ocamlyacc /yacc/ocamlyacc
/yacc/version.h /yacc/version.h

Changes

@ -85,6 +85,12 @@ Working version
    the new hook caml_fatal_error_hook.
    (Jacques-Henri Jourdan, review by Xavier Leroy)
- #8713: Introduce a state table in the runtime to contain the global variables
which must be duplicated for each domain in the multicore runtime.
(KC Sivaramakrishnan and Stephen Dolan, compatibility header hacking by
David Allsopp, review by David Allsopp, Alain Frisch, Nicolas Ojeda Bar,
Gabriel Scherer and Damien Doligez)
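
For readers following the diff, here is a rough sketch of the mechanism this entry describes (field names, types and order are illustrative, not the verbatim runtime header): every per-domain global is listed once in runtime/caml/domain_state.tbl, and each consumer expands that table with its own definition of the DOMAIN_STATE macro, exactly as the runtime/amd64.S hunk further down does.

```c
/* Sketch only: the real table lists many more fields and the real header
   adds alignment; the names below are examples taken from this diff. */
#define DOMAIN_STATE(c_type, name) c_type name;
typedef struct {
  DOMAIN_STATE(char *, young_ptr)         /* index 0 */
  DOMAIN_STATE(char *, young_limit)       /* index 1 */
  DOMAIN_STATE(char *, exception_pointer) /* index 2 */
  DOMAIN_STATE(long,   backtrace_pos)     /* index 3 */
  /* ... one DOMAIN_STATE line per entry of domain_state.tbl ... */
} caml_domain_state;
#undef DOMAIN_STATE

/* One instance per domain; its address is the only global left, and the
   amd64 code generator keeps it pinned in %r14. */
extern caml_domain_state *Caml_state;
#define Caml_state_field(name) (Caml_state->name)
```

Because each field occupies one 8-byte slot and appears in table order, the compiler-side Domainstate.idx_of_field and the assembly macros can both derive a field's offset as 8 * index.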
### Tools:

* #6792, #8654 ocamldebug now supports program using Dynlink. This

Makefile

@ -77,14 +77,12 @@ DEPINCLUDES=$(INCLUDES)
OCAMLDOC_OPT=$(WITH_OCAMLDOC:=.opt) OCAMLDOC_OPT=$(WITH_OCAMLDOC:=.opt)
UTILS=utils/config.cmo utils/build_path_prefix_map.cmo utils/misc.cmo \ UTILS=utils/config.cmo utils/build_path_prefix_map.cmo utils/misc.cmo \
utils/identifiable.cmo utils/numbers.cmo utils/arg_helper.cmo \ utils/identifiable.cmo utils/numbers.cmo utils/arg_helper.cmo \
utils/clflags.cmo utils/profile.cmo \ utils/clflags.cmo utils/profile.cmo utils/load_path.cmo \
utils/load_path.cmo \ utils/terminfo.cmo utils/ccomp.cmo utils/warnings.cmo \
utils/terminfo.cmo utils/ccomp.cmo utils/warnings.cmo \ utils/consistbl.cmo utils/strongly_connected_components.cmo \
utils/consistbl.cmo \ utils/targetint.cmo utils/int_replace_polymorphic_compare.cmo \
utils/strongly_connected_components.cmo \ utils/domainstate.cmo
utils/targetint.cmo \
utils/int_replace_polymorphic_compare.cmo
PARSING=parsing/location.cmo parsing/longident.cmo \ PARSING=parsing/location.cmo parsing/longident.cmo \
parsing/docstrings.cmo parsing/syntaxerr.cmo \ parsing/docstrings.cmo parsing/syntaxerr.cmo \
@ -338,12 +336,18 @@ reconfigure:
./configure $(CONFIGURE_ARGS) ./configure $(CONFIGURE_ARGS)
endif endif
utils/domainstate.ml: utils/domainstate.ml.c runtime/caml/domain_state.tbl
$(CPP) -I runtime/caml $< > $@
utils/domainstate.mli: utils/domainstate.mli.c runtime/caml/domain_state.tbl
$(CPP) -I runtime/caml $< > $@
.PHONY: partialclean .PHONY: partialclean
partialclean:: partialclean::
rm -f utils/config.ml rm -f utils/config.ml utils/domainstate.ml utils/domainstate.mli
.PHONY: beforedepend .PHONY: beforedepend
beforedepend:: utils/config.ml beforedepend:: utils/config.ml utils/domainstate.ml utils/domainstate.mli
# Start up the system from the distribution compiler # Start up the system from the distribution compiler
.PHONY: coldstart .PHONY: coldstart

asmcomp/amd64/emit.mlp

@ -154,6 +154,9 @@ let load_symbol_addr s arg =
else else
I.mov (sym (emit_symbol s)) arg I.mov (sym (emit_symbol s)) arg
let domain_field f =
mem64 QWORD (Domainstate.idx_of_field f * 8) R14
(* Output a label *) (* Output a label *)
let emit_label lbl = let emit_label lbl =
@ -594,8 +597,7 @@ let emit_instr fallthrough i =
If we do the same for Win64, we probably need to change If we do the same for Win64, we probably need to change
amd64nt.asm accordingly. amd64nt.asm accordingly.
*) *)
load_symbol_addr "caml_young_ptr" r11; I.mov (domain_field Domainstate.Domain_young_ptr) r15
I.mov (mem64 QWORD 0 R11) r15
end end
end else begin end else begin
emit_call func; emit_call func;
@ -654,24 +656,7 @@ let emit_instr fallthrough i =
let lbl_redo = new_label() in let lbl_redo = new_label() in
def_label lbl_redo; def_label lbl_redo;
I.sub (int n) r15; I.sub (int n) r15;
let spacetime_node_hole_ptr_is_in_rax = I.cmp (domain_field Domainstate.Domain_young_limit) r15;
Config.spacetime && (i.arg.(0).loc = Reg 0)
in
if !Clflags.dlcode then begin
(* When using Spacetime, %rax might be the node pointer, so we
must take care not to clobber it. (Whilst we can tell the
register allocator that %rax is destroyed by Ialloc, we can't
force that the argument (the node pointer) is not in %rax.) *)
if spacetime_node_hole_ptr_is_in_rax then begin
I.push rax
end;
load_symbol_addr "caml_young_limit" rax;
I.cmp (mem64 QWORD 0 RAX) r15;
if spacetime_node_hole_ptr_is_in_rax then begin
I.pop rax (* this does not affect the flags *)
end
end else
I.cmp (mem64_rip QWORD (emit_symbol "caml_young_limit")) r15;
let lbl_call_gc = new_label() in let lbl_call_gc = new_label() in
let dbg = let dbg =
if not Config.spacetime then Debuginfo.none if not Config.spacetime then Debuginfo.none
@ -887,15 +872,15 @@ let emit_instr fallthrough i =
else else
I.mov (sym (emit_label s)) arg I.mov (sym (emit_label s)) arg
in in
cfi_adjust_cfa_offset 16; load_label_addr lbl_handler r11;
I.sub (int 16) rsp; I.push r11;
cfi_adjust_cfa_offset 8;
I.push (domain_field Domainstate.Domain_exception_pointer);
cfi_adjust_cfa_offset 8;
I.mov rsp (domain_field Domainstate.Domain_exception_pointer);
stack_offset := !stack_offset + 16; stack_offset := !stack_offset + 16;
I.mov r14 (mem64 QWORD 0 RSP);
load_label_addr lbl_handler r14;
I.mov r14 (mem64 QWORD 8 RSP);
I.mov rsp r14
| Lpoptrap -> | Lpoptrap ->
I.pop r14; I.pop (domain_field Domainstate.Domain_exception_pointer);
cfi_adjust_cfa_offset (-8); cfi_adjust_cfa_offset (-8);
I.add (int 8) rsp; I.add (int 8) rsp;
cfi_adjust_cfa_offset (-8); cfi_adjust_cfa_offset (-8);
@ -905,12 +890,16 @@ let emit_instr fallthrough i =
[caml_reraise_exn]. The only function called that might affect the [caml_reraise_exn]. The only function called that might affect the
trie is [caml_stash_backtrace], and it does not. *) trie is [caml_stash_backtrace], and it does not. *)
begin match k with begin match k with
| Cmm.Raise_withtrace -> | Lambda.Raise_regular ->
I.mov (int 0) (domain_field Domainstate.Domain_backtrace_pos);
emit_call "caml_raise_exn"; emit_call "caml_raise_exn";
record_frame Reg.Set.empty true i.dbg record_frame Reg.Set.empty true i.dbg
| Cmm.Raise_notrace -> | Lambda.Raise_reraise ->
I.mov r14 rsp; emit_call "caml_raise_exn";
I.pop r14; record_frame Reg.Set.empty true i.dbg
| Lambda.Raise_notrace ->
I.mov (domain_field Domainstate.Domain_exception_pointer) rsp;
I.pop (domain_field Domainstate.Domain_exception_pointer);
I.pop r11; I.pop r11;
I.jmp r11 I.jmp r11
end end
@ -999,9 +988,6 @@ let begin_assembly() =
float_constants := []; float_constants := [];
all_functions := []; all_functions := [];
if system = S_win64 then begin if system = S_win64 then begin
D.extrn "caml_young_ptr" QWORD;
D.extrn "caml_young_limit" QWORD;
D.extrn "caml_exception_pointer" QWORD;
D.extrn "caml_call_gc" NEAR; D.extrn "caml_call_gc" NEAR;
D.extrn "caml_call_gc1" NEAR; D.extrn "caml_call_gc1" NEAR;
D.extrn "caml_call_gc2" NEAR; D.extrn "caml_call_gc2" NEAR;

asmcomp/amd64/proc.ml

@ -44,7 +44,7 @@ let win64 = Arch.win64
     r10         10
     r11         11
     rbp         12
-    r14         trap pointer
+    r14         domain state pointer
     r15         allocation pointer
   xmm0 - xmm15  100 - 115 *)
@ -325,6 +325,7 @@ let destroyed_at_oper = function
| Iop (Iintop_imm(Icheckbound _, _)) when Config.spacetime -> | Iop (Iintop_imm(Icheckbound _, _)) when Config.spacetime ->
[| loc_spacetime_node_hole |] [| loc_spacetime_node_hole |]
| Iswitch(_, _) -> [| rax; rdx |] | Iswitch(_, _) -> [| rax; rdx |]
| Itrywith _ -> [| r11 |]
| _ -> | _ ->
if fp then if fp then
(* prevent any use of the frame pointer ! *) (* prevent any use of the frame pointer ! *)

asmcomp/arm/emit.mlp

@ -650,7 +650,12 @@ let emit_instr i =
then ` sub {emit_reg i.res.(0)}, alloc_ptr, #{emit_int32 a}\n` then ` sub {emit_reg i.res.(0)}, alloc_ptr, #{emit_int32 a}\n`
else ` sub {emit_reg i.res.(0)}, {emit_reg i.res.(0)}, #{emit_int32 a}\n`; else ` sub {emit_reg i.res.(0)}, {emit_reg i.res.(0)}, #{emit_int32 a}\n`;
first := false) in first := false) in
` cmp {emit_reg i.res.(0)}, alloc_limit\n`; let offset = Domainstate.(idx_of_field Domain_young_limit) * 8 in
let tmp = if i.res.(0).loc = Reg 8 (* r12 *) then phys_reg 7 (* r7 *)
else phys_reg 8 (* r12 *)
in
` ldr {emit_reg tmp}, [domain_state_ptr, {emit_int offset}]\n`;
` cmp {emit_reg i.res.(0)}, {emit_reg tmp}\n`;
let lbl_call_gc = new_label() in let lbl_call_gc = new_label() in
` bls {emit_label lbl_call_gc}\n`; ` bls {emit_label lbl_call_gc}\n`;
` sub alloc_ptr, {emit_reg i.res.(0)}, #4\n`; ` sub alloc_ptr, {emit_reg i.res.(0)}, #4\n`;
@ -896,10 +901,16 @@ let emit_instr i =
stack_offset := !stack_offset - 8; 1 stack_offset := !stack_offset - 8; 1
| Lraise k -> | Lraise k ->
begin match k with begin match k with
| Cmm.Raise_withtrace -> | Lambda.Raise_regular ->
let offset = Domainstate.(idx_of_field Domain_backtrace_pos) * 8 in
` mov r12, #0\n`;
` str r12, [domain_state_ptr, {emit_int offset}]\n`;
` {emit_call "caml_raise_exn"}\n`;
`{record_frame Reg.Set.empty true i.dbg}\n`; 3
| Lambda.Raise_reraise ->
` {emit_call "caml_raise_exn"}\n`; ` {emit_call "caml_raise_exn"}\n`;
`{record_frame Reg.Set.empty true i.dbg}\n`; 1 `{record_frame Reg.Set.empty true i.dbg}\n`; 1
| Cmm.Raise_notrace -> | Lambda.Raise_notrace ->
` mov sp, trap_ptr\n`; ` mov sp, trap_ptr\n`;
` pop \{trap_ptr, pc}\n`; 2 ` pop \{trap_ptr, pc}\n`; 2
end end
@ -1019,7 +1030,7 @@ let begin_assembly() =
end; end;
`trap_ptr .req r8\n`; `trap_ptr .req r8\n`;
`alloc_ptr .req r10\n`; `alloc_ptr .req r10\n`;
`alloc_limit .req r11\n`; `domain_state_ptr .req r11\n`;
let lbl_begin = Compilenv.make_symbol (Some "data_begin") in let lbl_begin = Compilenv.make_symbol (Some "data_begin") in
` .data\n`; ` .data\n`;
` .globl {emit_symbol lbl_begin}\n`; ` .globl {emit_symbol lbl_begin}\n`;

asmcomp/arm/proc.ml

@ -34,7 +34,7 @@ let word_addressed = false
     r8          trap pointer (preserved)
     r9          platform register, usually reserved
     r10         allocation pointer (preserved)
-    r11         allocation limit (preserved)
+    r11         domain state pointer (preserved)
     r12         intra-procedural scratch register (not preserved)
     r13         stack pointer
     r14         return address

asmcomp/arm64/emit.mlp

@ -33,6 +33,7 @@ let fastcode_flag = ref true
(* Names for special regs *) (* Names for special regs *)
let reg_domain_state_ptr = phys_reg 22
let reg_trap_ptr = phys_reg 23 let reg_trap_ptr = phys_reg 23
let reg_alloc_ptr = phys_reg 24 let reg_alloc_ptr = phys_reg 24
let reg_alloc_limit = phys_reg 25 let reg_alloc_limit = phys_reg 25
@ -500,8 +501,9 @@ module BR = Branch_relaxation.Make (struct
| Lpoptrap -> 1 | Lpoptrap -> 1
| Lraise k -> | Lraise k ->
begin match k with begin match k with
| Cmm.Raise_withtrace -> 1 | Lambda.Raise_regular -> 2
| Cmm.Raise_notrace -> 4 | Lambda.Raise_reraise -> 1
| Lambda.Raise_notrace -> 4
end end
let relax_allocation ~num_bytes ~label_after_call_gc = let relax_allocation ~num_bytes ~label_after_call_gc =
@ -894,10 +896,15 @@ let emit_instr i =
stack_offset := !stack_offset - 16 stack_offset := !stack_offset - 16
| Lraise k -> | Lraise k ->
begin match k with begin match k with
| Cmm.Raise_withtrace -> | Lambda.Raise_regular ->
let offset = Domainstate.(idx_of_field Domain_backtrace_pos) * 8 in
` str xzr, [{emit_reg reg_domain_state_ptr}, {emit_int offset}]\n`;
` bl {emit_symbol "caml_raise_exn"}\n`; ` bl {emit_symbol "caml_raise_exn"}\n`;
`{record_frame Reg.Set.empty true i.dbg}\n` `{record_frame Reg.Set.empty true i.dbg}\n`
| Cmm.Raise_notrace -> | Lambda.Raise_reraise ->
` bl {emit_symbol "caml_raise_exn"}\n`;
`{record_frame Reg.Set.empty true i.dbg}\n`
| Lambda.Raise_notrace ->
` mov sp, {emit_reg reg_trap_ptr}\n`; ` mov sp, {emit_reg reg_trap_ptr}\n`;
` ldr {emit_reg reg_tmp1}, [sp, #8]\n`; ` ldr {emit_reg reg_tmp1}, [sp, #8]\n`;
` ldr {emit_reg reg_trap_ptr}, [sp], 16\n`; ` ldr {emit_reg reg_trap_ptr}, [sp], 16\n`;

asmcomp/arm64/proc.ml

@ -33,7 +33,8 @@ let word_addressed = false
     x0 - x15    general purpose (caller-save)
     x16, x17    temporaries (used by call veeners)
     x18         platform register (reserved)
-    x19 - x25   general purpose (callee-save)
+    x19 - x24   general purpose (callee-save)
+    x25         domain state pointer
     x26         trap pointer
     x27         alloc pointer
     x28         alloc limit
@ -49,8 +50,8 @@ let word_addressed = false
let int_reg_name = let int_reg_name =
[| "x0"; "x1"; "x2"; "x3"; "x4"; "x5"; "x6"; "x7"; [| "x0"; "x1"; "x2"; "x3"; "x4"; "x5"; "x6"; "x7";
"x8"; "x9"; "x10"; "x11"; "x12"; "x13"; "x14"; "x15"; "x8"; "x9"; "x10"; "x11"; "x12"; "x13"; "x14"; "x15";
"x19"; "x20"; "x21"; "x22"; "x23"; "x24"; "x25"; "x19"; "x20"; "x21"; "x22"; "x23"; "x24";
"x26"; "x27"; "x28"; "x16"; "x17" |] "x25"; "x26"; "x27"; "x28"; "x16"; "x17" |]
let float_reg_name = let float_reg_name =
[| "d0"; "d1"; "d2"; "d3"; "d4"; "d5"; "d6"; "d7"; [| "d0"; "d1"; "d2"; "d3"; "d4"; "d5"; "d6"; "d7";
@ -66,7 +67,7 @@ let register_class r =
| Float -> 1 | Float -> 1
let num_available_registers = let num_available_registers =
[| 23; 32 |] (* first 23 int regs allocatable; all float regs allocatable *) [| 22; 32 |] (* first 22 int regs allocatable; all float regs allocatable *)
let first_available_register = let first_available_register =
[| 0; 100 |] [| 0; 100 |]
@ -177,8 +178,8 @@ let loc_exn_bucket = phys_reg 0
let int_dwarf_reg_numbers = let int_dwarf_reg_numbers =
[| 0; 1; 2; 3; 4; 5; 6; 7; [| 0; 1; 2; 3; 4; 5; 6; 7;
8; 9; 10; 11; 12; 13; 14; 15; 8; 9; 10; 11; 12; 13; 14; 15;
19; 20; 21; 22; 23; 24; 25; 19; 20; 21; 22; 23; 24;
26; 27; 28; 16; 17; 25; 26; 27; 28; 16; 17;
|] |]
let float_dwarf_reg_numbers = let float_dwarf_reg_numbers =
@ -229,15 +230,15 @@ let destroyed_at_reloadretaddr = [| |]
let safe_register_pressure = function let safe_register_pressure = function
| Iextcall _ -> 8 | Iextcall _ -> 8
| Ialloc _ -> 25 | Ialloc _ -> 24
| _ -> 26 | _ -> 25
let max_register_pressure = function let max_register_pressure = function
| Iextcall _ -> [| 10; 8 |] | Iextcall _ -> [| 10; 8 |]
| Ialloc _ -> [| 25; 32 |] | Ialloc _ -> [| 24; 32 |]
| Iintoffloat | Ifloatofint | Iintoffloat | Ifloatofint
| Iload(Single, _) | Istore(Single, _, _) -> [| 26; 31 |] | Iload(Single, _) | Istore(Single, _, _) -> [| 25; 31 |]
| _ -> [| 26; 32 |] | _ -> [| 25; 32 |]
(* Pure operations (without any side effect besides updating their result (* Pure operations (without any side effect besides updating their result
registers). *) registers). *)

asmcomp/cmm.ml

@ -98,10 +98,6 @@ let label_counter = ref 99
let new_label() = incr label_counter; !label_counter let new_label() = incr label_counter; !label_counter
type raise_kind =
| Raise_withtrace
| Raise_notrace
type rec_flag = Nonrecursive | Recursive type rec_flag = Nonrecursive | Recursive
type phantom_defining_expr = type phantom_defining_expr =
@ -143,7 +139,7 @@ and operation =
| Caddf | Csubf | Cmulf | Cdivf | Caddf | Csubf | Cmulf | Cdivf
| Cfloatofint | Cintoffloat | Cfloatofint | Cintoffloat
| Ccmpf of float_comparison | Ccmpf of float_comparison
| Craise of raise_kind | Craise of Lambda.raise_kind
| Ccheckbound | Ccheckbound
type expression = type expression =

asmcomp/cmm.mli

@ -83,10 +83,6 @@ val swap_float_comparison: float_comparison -> float_comparison
type label = int type label = int
val new_label: unit -> label val new_label: unit -> label
type raise_kind =
| Raise_withtrace
| Raise_notrace
type rec_flag = Nonrecursive | Recursive type rec_flag = Nonrecursive | Recursive
type phantom_defining_expr = type phantom_defining_expr =
@ -145,7 +141,7 @@ and operation =
| Caddf | Csubf | Cmulf | Cdivf | Caddf | Csubf | Cmulf | Cdivf
| Cfloatofint | Cintoffloat | Cfloatofint | Cintoffloat
| Ccmpf of float_comparison | Ccmpf of float_comparison
| Craise of raise_kind | Craise of Lambda.raise_kind
| Ccheckbound | Ccheckbound
(** Every basic block should have a corresponding [Debuginfo.t] for its (** Every basic block should have a corresponding [Debuginfo.t] for its

asmcomp/cmmgen.ml

@ -435,15 +435,8 @@ let validate d m p =
ucompare2 twoszp md < 0 && ucompare2 md (add2 twoszp twop1) <= 0 ucompare2 twoszp md < 0 && ucompare2 md (add2 twoszp twop1) <= 0
*) *)
let raise_regular dbg exc =
Csequence(
Cop(Cstore (Thirtytwo_signed, Assignment),
[(Cconst_symbol ("caml_backtrace_pos", dbg));
Cconst_int (0, dbg)], dbg),
Cop(Craise Raise_withtrace,[exc], dbg))
let raise_symbol dbg symb = let raise_symbol dbg symb =
raise_regular dbg (Cconst_symbol (symb, dbg)) Cop(Craise Lambda.Raise_regular, [Cconst_symbol (symb, dbg)], dbg)
let rec div_int c1 c2 is_safe dbg = let rec div_int c1 c2 is_safe dbg =
match (c1, c2) with match (c1, c2) with
@ -2394,13 +2387,9 @@ and transl_prim_1 env p arg dbg =
(* always a pointer outside the heap *) (* always a pointer outside the heap *)
(* Exceptions *) (* Exceptions *)
| Praise _ when not (!Clflags.debug) -> | Praise _ when not (!Clflags.debug) ->
Cop(Craise Cmm.Raise_notrace, [transl env arg], dbg) Cop(Craise Lambda.Raise_notrace, [transl env arg], dbg)
| Praise Lambda.Raise_notrace -> | Praise raise_kind ->
Cop(Craise Cmm.Raise_notrace, [transl env arg], dbg) Cop(Craise raise_kind, [transl env arg], dbg)
| Praise Lambda.Raise_reraise ->
Cop(Craise Cmm.Raise_withtrace, [transl env arg], dbg)
| Praise Lambda.Raise_regular ->
raise_regular dbg (transl env arg)
(* Integer operations *) (* Integer operations *)
| Pnegint -> | Pnegint ->
Cop(Csubi, [Cconst_int (2, dbg); transl env arg], dbg) Cop(Csubi, [Cconst_int (2, dbg); transl env arg], dbg)

asmcomp/i386/emit.mlp

@ -137,6 +137,12 @@ let register_name r =
let sym32 ?ofs s = mem_sym ?ofs DWORD (emit_symbol s) let sym32 ?ofs s = mem_sym ?ofs DWORD (emit_symbol s)
let domain_field f r =
mem32 DWORD (Domainstate.idx_of_field f * 8) r
let load_domain_state r =
I.mov (sym32 "Caml_state") r
let reg = function let reg = function
| { loc = Reg r } -> register_name r | { loc = Reg r } -> register_name r
| { loc = Stack(Incoming n | Outgoing n) } when n < 0 -> | { loc = Stack(Incoming n | Outgoing n) } when n < 0 ->
@ -609,13 +615,14 @@ let emit_instr fallthrough i =
if !fastcode_flag then begin if !fastcode_flag then begin
let lbl_redo = new_label() in let lbl_redo = new_label() in
def_label lbl_redo; def_label lbl_redo;
I.mov (sym32 "caml_young_ptr") eax; load_domain_state ebx;
I.mov (domain_field Domain_young_ptr RBX) eax;
I.sub (int n) eax; I.sub (int n) eax;
I.cmp (sym32 "caml_young_limit") eax; I.cmp (domain_field Domain_young_limit RBX) eax;
let lbl_call_gc = new_label() in let lbl_call_gc = new_label() in
let lbl_frame = record_frame_label i.live false Debuginfo.none in let lbl_frame = record_frame_label i.live false Debuginfo.none in
I.jb (label lbl_call_gc); I.jb (label lbl_call_gc);
I.mov eax (sym32 "caml_young_ptr"); I.mov eax (domain_field Domain_young_ptr RBX);
I.lea (mem32 NONE 4 RAX) (reg i.res.(0)); I.lea (mem32 NONE 4 RAX) (reg i.res.(0));
call_gc_sites := call_gc_sites :=
{ gc_lbl = lbl_call_gc; { gc_lbl = lbl_call_gc;
@ -865,23 +872,34 @@ let emit_instr fallthrough i =
I.push (label lbl_handler); I.push (label lbl_handler);
if trap_frame_size > 8 then if trap_frame_size > 8 then
I.sub (int (trap_frame_size - 8)) esp; I.sub (int (trap_frame_size - 8)) esp;
I.push (sym32 "caml_exception_pointer"); load_domain_state edx;
I.push (domain_field Domain_exception_pointer RDX);
cfi_adjust_cfa_offset trap_frame_size; cfi_adjust_cfa_offset trap_frame_size;
I.mov esp (sym32 "caml_exception_pointer"); I.mov esp (domain_field Domain_exception_pointer RDX);
stack_offset := !stack_offset + trap_frame_size stack_offset := !stack_offset + trap_frame_size
| Lpoptrap -> | Lpoptrap ->
I.pop (sym32 "caml_exception_pointer"); I.mov edx (mem32 DWORD 4 RSP);
I.add (int (trap_frame_size - 4)) esp; load_domain_state edx;
I.pop (domain_field Domain_exception_pointer RDX);
I.pop edx;
if trap_frame_size > 8 then
I.add (int (trap_frame_size - 8)) esp;
cfi_adjust_cfa_offset (-trap_frame_size); cfi_adjust_cfa_offset (-trap_frame_size);
stack_offset := !stack_offset - trap_frame_size stack_offset := !stack_offset - trap_frame_size
| Lraise k -> | Lraise k ->
begin match k with begin match k with
| Cmm.Raise_withtrace -> | Lambda.Raise_regular ->
load_domain_state ebx;
I.mov (int 0) (domain_field Domain_backtrace_pos RBX);
emit_call "caml_raise_exn"; emit_call "caml_raise_exn";
record_frame Reg.Set.empty true i.dbg record_frame Reg.Set.empty true i.dbg
| Cmm.Raise_notrace -> | Lambda.Raise_reraise ->
I.mov (sym32 "caml_exception_pointer") esp; emit_call "caml_raise_exn";
I.pop (sym32 "caml_exception_pointer"); record_frame Reg.Set.empty true i.dbg
| Lambda.Raise_notrace ->
load_domain_state ebx;
I.mov (domain_field Domain_exception_pointer RBX) esp;
I.pop (domain_field Domain_exception_pointer RBX);
if trap_frame_size > 8 then if trap_frame_size > 8 then
I.add (int (trap_frame_size - 8)) esp; I.add (int (trap_frame_size - 8)) esp;
I.pop ebx; I.pop ebx;
@ -958,9 +976,6 @@ let begin_assembly() =
if system = S_win32 then begin if system = S_win32 then begin
D.mode386 (); D.mode386 ();
D.model "FLAT"; D.model "FLAT";
D.extrn "_caml_young_ptr" DWORD;
D.extrn "_caml_young_limit" DWORD;
D.extrn "_caml_exception_pointer" DWORD;
D.extrn "_caml_extra_params" DWORD; D.extrn "_caml_extra_params" DWORD;
D.extrn "_caml_call_gc" PROC; D.extrn "_caml_call_gc" PROC;
D.extrn "_caml_c_call" PROC; D.extrn "_caml_c_call" PROC;
@ -970,6 +985,7 @@ let begin_assembly() =
D.extrn "_caml_alloc3" PROC; D.extrn "_caml_alloc3" PROC;
D.extrn "_caml_ml_array_bound_error" PROC; D.extrn "_caml_ml_array_bound_error" PROC;
D.extrn "_caml_raise_exn" PROC; D.extrn "_caml_raise_exn" PROC;
D.extrn "_Caml_state" DWORD;
end; end;
D.data (); D.data ();
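
Unlike amd64, the 32-bit backend has no register to spare for the domain state, which is why the hunks above first call load_domain_state to fetch the pointer into a scratch register. A short sketch of the equivalent C-level access pattern (the extern declaration is an assumption for illustration; the 8-byte stride matches the domain_field helper in this file):

```c
/* No dedicated register on i386: generated code reloads the global pointer
   (the _Caml_state symbol declared extern in the hunk above) and then
   indexes fields at 8-byte strides, matching domain_field's idx * 8. */
struct caml_domain_state;                      /* layout not needed here   */
extern struct caml_domain_state *Caml_state;   /* the one remaining global */

static void *domain_field_addr(int idx)
{
  /* slots stay 8 bytes wide even on 32-bit, keeping offsets in sync with
     the table order used by Domainstate.idx_of_field */
  return (char *) Caml_state + 8 * idx;
}
```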

asmcomp/i386/proc.ml

@ -88,6 +88,7 @@ let phys_reg n =
if n < 100 then hard_int_reg.(n) else hard_float_reg.(n - 100) if n < 100 then hard_int_reg.(n) else hard_float_reg.(n - 100)
let eax = phys_reg 0 let eax = phys_reg 0
let ebx = phys_reg 1
let ecx = phys_reg 2 let ecx = phys_reg 2
let edx = phys_reg 3 let edx = phys_reg 3
@ -204,10 +205,12 @@ let destroyed_at_oper = function
all_phys_regs all_phys_regs
| Iop(Iextcall { alloc = false; }) -> destroyed_at_c_call | Iop(Iextcall { alloc = false; }) -> destroyed_at_c_call
| Iop(Iintop(Idiv | Imod)) -> [| eax; edx |] | Iop(Iintop(Idiv | Imod)) -> [| eax; edx |]
| Iop(Ialloc _ | Iintop Imulh) -> [| eax |] | Iop(Ialloc _) -> [| eax; ebx |]
| Iop(Iintop Imulh) -> [| eax |]
| Iop(Iintop(Icomp _) | Iintop_imm(Icomp _, _)) -> [| eax |] | Iop(Iintop(Icomp _) | Iintop_imm(Icomp _, _)) -> [| eax |]
| Iop(Iintoffloat) -> [| eax |] | Iop(Iintoffloat) -> [| eax |]
| Iifthenelse(Ifloattest _, _, _) -> [| eax |] | Iifthenelse(Ifloattest _, _, _) -> [| eax |]
| Itrywith _ -> [| edx |]
| _ -> [||] | _ -> [||]
let destroyed_at_raise = all_phys_regs let destroyed_at_raise = all_phys_regs

asmcomp/linearize.ml

@ -43,7 +43,7 @@ and instruction_desc =
| Ladjust_trap_depth of { delta_traps : int; } | Ladjust_trap_depth of { delta_traps : int; }
| Lpushtrap of { lbl_handler : label; } | Lpushtrap of { lbl_handler : label; }
| Lpoptrap | Lpoptrap
| Lraise of Cmm.raise_kind | Lraise of Lambda.raise_kind
let has_fallthrough = function let has_fallthrough = function
| Lreturn | Lbranch _ | Lswitch _ | Lraise _ | Lreturn | Lbranch _ | Lswitch _ | Lraise _

asmcomp/linearize.mli

@ -40,7 +40,7 @@ and instruction_desc =
| Ladjust_trap_depth of { delta_traps : int; } | Ladjust_trap_depth of { delta_traps : int; }
| Lpushtrap of { lbl_handler : label; } | Lpushtrap of { lbl_handler : label; }
| Lpoptrap | Lpoptrap
| Lraise of Cmm.raise_kind | Lraise of Lambda.raise_kind
val has_fallthrough : instruction_desc -> bool val has_fallthrough : instruction_desc -> bool
val end_instr: instruction val end_instr: instruction

asmcomp/mach.ml

@ -84,7 +84,7 @@ and instruction_desc =
| Icatch of Cmm.rec_flag * (int * instruction) list * instruction | Icatch of Cmm.rec_flag * (int * instruction) list * instruction
| Iexit of int | Iexit of int
| Itrywith of instruction * instruction | Itrywith of instruction * instruction
| Iraise of Cmm.raise_kind | Iraise of Lambda.raise_kind
type spacetime_part_of_shape = type spacetime_part_of_shape =
| Direct_call_point of { callee : string; } | Direct_call_point of { callee : string; }

asmcomp/mach.mli

@ -100,7 +100,7 @@ and instruction_desc =
| Icatch of Cmm.rec_flag * (int * instruction) list * instruction | Icatch of Cmm.rec_flag * (int * instruction) list * instruction
| Iexit of int | Iexit of int
| Itrywith of instruction * instruction | Itrywith of instruction * instruction
| Iraise of Cmm.raise_kind | Iraise of Lambda.raise_kind
type spacetime_part_of_shape = type spacetime_part_of_shape =
| Direct_call_point of { callee : string; (* the symbol *) } | Direct_call_point of { callee : string; (* the symbol *) }

asmcomp/power/emit.mlp

@ -742,12 +742,12 @@ let emit_instr i =
end else begin end else begin
match abi with match abi with
| ELF32 -> | ELF32 ->
` addis 28, 0, {emit_upper emit_symbol func}\n`; ` addis 25, 0, {emit_upper emit_symbol func}\n`;
` addi 28, 28, {emit_lower emit_symbol func}\n`; ` addi 25, 25, {emit_lower emit_symbol func}\n`;
emit_call "caml_c_call"; emit_call "caml_c_call";
record_frame i.live false i.dbg record_frame i.live false i.dbg
| ELF64v1 | ELF64v2 -> | ELF64v1 | ELF64v2 ->
emit_tocload emit_gpr 28 (TocSym func); emit_tocload emit_gpr 25 (TocSym func);
emit_call "caml_c_call"; emit_call "caml_c_call";
record_frame i.live false i.dbg; record_frame i.live false i.dbg;
` nop\n` ` nop\n`
@ -1007,11 +1007,23 @@ let emit_instr i =
adjust_stack_offset (-trap_size) adjust_stack_offset (-trap_size)
| Lraise k -> | Lraise k ->
begin match k with begin match k with
| Cmm.Raise_withtrace -> | Lambda.Raise_regular ->
` li 0, 0\n`;
let backtrace_pos =
Domainstate.(idx_of_field Domain_backtrace_pos)
in
begin match abi with
| ELF32 -> ` stw 0, {emit_int (backtrace_pos * 8)}(28)\n`
| _ -> ` std 0, {emit_int (backtrace_pos * 8)}(28)\n`
end;
emit_call "caml_raise_exn"; emit_call "caml_raise_exn";
record_frame Reg.Set.empty true i.dbg; record_frame Reg.Set.empty true i.dbg;
emit_call_nop() emit_call_nop()
| Cmm.Raise_notrace -> | Lambda.Raise_reraise ->
emit_call "caml_raise_exn";
record_frame Reg.Set.empty true i.dbg;
emit_call_nop()
| Lambda.Raise_notrace ->
` {emit_string lg} 0, {emit_int trap_handler_offset}(29)\n`; ` {emit_string lg} 0, {emit_int trap_handler_offset}(29)\n`;
` mr 1, 29\n`; ` mr 1, 29\n`;
` mtctr 0\n`; ` mtctr 0\n`;

asmcomp/power/proc.ml

@ -34,7 +34,8 @@ let word_addressed = false
     3 - 10      function arguments and results
     11 - 12     temporaries
     13          pointer to small data area
-    14 - 28     general purpose, preserved by C
+    14 - 27     general purpose, preserved by C
+    28          domain state pointer
     29          trap pointer
     30          allocation limit
     31          allocation pointer
@ -47,7 +48,7 @@ let word_addressed = false
let int_reg_name = let int_reg_name =
[| "3"; "4"; "5"; "6"; "7"; "8"; "9"; "10"; [| "3"; "4"; "5"; "6"; "7"; "8"; "9"; "10";
"14"; "15"; "16"; "17"; "18"; "19"; "20"; "21"; "14"; "15"; "16"; "17"; "18"; "19"; "20"; "21";
"22"; "23"; "24"; "25"; "26"; "27"; "28" |] "22"; "23"; "24"; "25"; "26"; "27" |]
let float_reg_name = let float_reg_name =
[| "1"; "2"; "3"; "4"; "5"; "6"; "7"; "8"; [| "1"; "2"; "3"; "4"; "5"; "6"; "7"; "8";
@ -62,7 +63,7 @@ let register_class r =
| Val | Int | Addr -> 0 | Val | Int | Addr -> 0
| Float -> 1 | Float -> 1
let num_available_registers = [| 23; 31 |] let num_available_registers = [| 22; 31 |]
let first_available_register = [| 0; 100 |] let first_available_register = [| 0; 100 |]
@ -74,8 +75,8 @@ let rotate_registers = true
(* Representation of hard registers by pseudo-registers *) (* Representation of hard registers by pseudo-registers *)
let hard_int_reg = let hard_int_reg =
let v = Array.make 23 Reg.dummy in let v = Array.make 22 Reg.dummy in
for i = 0 to 22 do v.(i) <- Reg.at_location Int (Reg i) done; v for i = 0 to 21 do v.(i) <- Reg.at_location Int (Reg i) done; v
let hard_float_reg = let hard_float_reg =
let v = Array.make 31 Reg.dummy in let v = Array.make 31 Reg.dummy in
@ -276,7 +277,7 @@ let loc_exn_bucket = phys_reg 0
let int_dwarf_reg_numbers = let int_dwarf_reg_numbers =
[| 3; 4; 5; 6; 7; 8; 9; 10; [| 3; 4; 5; 6; 7; 8; 9; 10;
14; 15; 16; 17; 18; 19; 20; 21; 14; 15; 16; 17; 18; 19; 20; 21;
22; 23; 24; 25; 26; 27; 28; 22; 23; 24; 25; 26; 27;
|] |]
let float_dwarf_reg_numbers = let float_dwarf_reg_numbers =
@ -318,12 +319,12 @@ let destroyed_at_reloadretaddr = [| phys_reg 11 |]
(* Maximal register pressure *) (* Maximal register pressure *)
let safe_register_pressure = function let safe_register_pressure = function
Iextcall _ -> 15 Iextcall _ -> 14
| _ -> 23 | _ -> 22
let max_register_pressure = function let max_register_pressure = function
Iextcall _ -> [| 15; 18 |] Iextcall _ -> [| 14; 18 |]
| _ -> [| 23; 30 |] | _ -> [| 22; 30 |]
(* Pure operations (without any side effect besides updating their result (* Pure operations (without any side effect besides updating their result
registers). *) registers). *)

asmcomp/printcmm.ml

@ -72,10 +72,6 @@ let chunk = function
| Double -> "float64" | Double -> "float64"
| Double_u -> "float64u" | Double_u -> "float64u"
let raise_kind fmt = function
| Raise_withtrace -> Format.fprintf fmt "raise_withtrace"
| Raise_notrace -> Format.fprintf fmt "raise_notrace"
let phantom_defining_expr ppf defining_expr = let phantom_defining_expr ppf defining_expr =
match defining_expr with match defining_expr with
| Cphantom_const_int i -> Targetint.print ppf i | Cphantom_const_int i -> Targetint.print ppf i
@ -139,7 +135,7 @@ let operation d = function
| Cfloatofint -> "floatofint" | Cfloatofint -> "floatofint"
| Cintoffloat -> "intoffloat" | Cintoffloat -> "intoffloat"
| Ccmpf c -> Printf.sprintf "%sf" (float_comparison c) | Ccmpf c -> Printf.sprintf "%sf" (float_comparison c)
| Craise k -> Format.asprintf "%a%s" raise_kind k (Debuginfo.to_string d) | Craise k -> Lambda.raise_kind k ^ Debuginfo.to_string d
| Ccheckbound -> "checkbound" ^ Debuginfo.to_string d | Ccheckbound -> "checkbound" ^ Debuginfo.to_string d
let rec expr ppf = function let rec expr ppf = function

asmcomp/printcmm.mli

@ -28,4 +28,3 @@ val expression : formatter -> Cmm.expression -> unit
val fundecl : formatter -> Cmm.fundecl -> unit val fundecl : formatter -> Cmm.fundecl -> unit
val data : formatter -> Cmm.data_item list -> unit val data : formatter -> Cmm.data_item list -> unit
val phrase : formatter -> Cmm.phrase -> unit val phrase : formatter -> Cmm.phrase -> unit
val raise_kind: formatter -> Cmm.raise_kind -> unit

asmcomp/printlinear.ml

@ -68,7 +68,7 @@ let instr ppf i =
| Lpoptrap -> | Lpoptrap ->
fprintf ppf "pop trap" fprintf ppf "pop trap"
| Lraise k -> | Lraise k ->
fprintf ppf "%a %a" Printcmm.raise_kind k reg i.arg.(0) fprintf ppf "%s %a" (Lambda.raise_kind k) reg i.arg.(0)
end; end;
if not (Debuginfo.is_none i.dbg) then if not (Debuginfo.is_none i.dbg) then
fprintf ppf " %s" (Debuginfo.to_string i.dbg) fprintf ppf " %s" (Debuginfo.to_string i.dbg)

asmcomp/printmach.ml

@ -228,7 +228,7 @@ let rec instr ppf i =
fprintf ppf "@[<v 2>try@,%a@;<0 -2>with@,%a@;<0 -2>endtry@]" fprintf ppf "@[<v 2>try@,%a@;<0 -2>with@,%a@;<0 -2>endtry@]"
instr body instr handler instr body instr handler
| Iraise k -> | Iraise k ->
fprintf ppf "%a %a" Printcmm.raise_kind k reg i.arg.(0) fprintf ppf "%s %a" (Lambda.raise_kind k) reg i.arg.(0)
end; end;
if not (Debuginfo.is_none i.dbg) then if not (Debuginfo.is_none i.dbg) then
fprintf ppf "%s" (Debuginfo.to_string i.dbg); fprintf ppf "%s" (Debuginfo.to_string i.dbg);

asmcomp/s390x/emit.mlp

@ -430,7 +430,8 @@ let emit_instr i =
gc_frame_lbl = lbl_frame } :: !call_gc_sites; gc_frame_lbl = lbl_frame } :: !call_gc_sites;
`{emit_label lbl_redo}:`; `{emit_label lbl_redo}:`;
` lay {emit_reg i.res.(0)}, {emit_int(-n+8)}(%r11)\n`; ` lay {emit_reg i.res.(0)}, {emit_int(-n+8)}(%r11)\n`;
` clgr {emit_reg i.res.(0)}, %r10\n`; let offset = Domainstate.(idx_of_field Domain_young_limit) * 8 in
` clg {emit_reg i.res.(0)}, {emit_int offset}(%r10)\n`;
` brcl 12, {emit_label lbl_call_gc}\n`; ` brcl 12, {emit_label lbl_call_gc}\n`;
(* less than or equal *) (* less than or equal *)
` lay %r11, -8({emit_reg i.res.(0)})\n` ` lay %r11, -8({emit_reg i.res.(0)})\n`
@ -629,10 +630,16 @@ let emit_instr i =
stack_offset := !stack_offset - 16 stack_offset := !stack_offset - 16
| Lraise k -> | Lraise k ->
begin match k with begin match k with
| Cmm.Raise_withtrace -> | Lambda.Raise_regular->
let offset = Domainstate.(idx_of_field Domain_backtrace_pos) * 8 in
` lghi %r1, 0\n`;
` stg %r1, {emit_int offset}(%r10)\n`;
emit_call "caml_raise_exn"; emit_call "caml_raise_exn";
`{record_frame Reg.Set.empty true i.dbg}\n` `{record_frame Reg.Set.empty true i.dbg}\n`
| Cmm.Raise_notrace -> | Lambda.Raise_reraise ->
emit_call "caml_raise_exn";
`{record_frame Reg.Set.empty true i.dbg}\n`
| Lambda.Raise_notrace ->
` lg %r1, 0(%r13)\n`; ` lg %r1, 0(%r13)\n`;
` lgr %r15, %r13\n`; ` lgr %r15, %r13\n`;
` lg %r13, {emit_int size_addr}(%r15)\n`; ` lg %r13, {emit_int size_addr}(%r15)\n`;

asmcomp/s390x/proc.ml

@ -35,7 +35,7 @@ let word_addressed = false
     2 - 5       function arguments and results (volatile)
     6           function arguments and results (preserved by C)
     7 - 9       general purpose, preserved by C
-    10          allocation limit (preserved by C)
+    10          domain state pointer (preserved by C)
     11          allocation pointer (preserved by C)
     12          general purpose (preserved by C)
     13          trap pointer (preserved by C)

asmcomp/selectgen.ml

@ -403,8 +403,9 @@ method mark_instr = function
self#mark_c_tailcall (* caml_ml_array_bound_error *) self#mark_c_tailcall (* caml_ml_array_bound_error *)
| Iraise raise_kind -> | Iraise raise_kind ->
begin match raise_kind with begin match raise_kind with
| Cmm.Raise_notrace -> () | Lambda.Raise_notrace -> ()
| Cmm.Raise_withtrace -> | Lambda.Raise_regular
| Lambda.Raise_reraise ->
(* PR#6239 *) (* PR#6239 *)
(* caml_stash_backtrace; we #mark_call rather than (* caml_stash_backtrace; we #mark_call rather than
#mark_c_tailcall to get a good stack backtrace *) #mark_c_tailcall to get a good stack backtrace *)

otherlibs/systhreads/st_stubs.c

@ -19,6 +19,7 @@
#include "caml/backtrace.h" #include "caml/backtrace.h"
#include "caml/callback.h" #include "caml/callback.h"
#include "caml/custom.h" #include "caml/custom.h"
#include "caml/domain.h"
#include "caml/fail.h" #include "caml/fail.h"
#include "caml/io.h" #include "caml/io.h"
#include "caml/memory.h" #include "caml/memory.h"
@ -70,15 +71,15 @@ struct caml_thread_descr {
/* The infos on threads (allocated via caml_stat_alloc()) */ /* The infos on threads (allocated via caml_stat_alloc()) */
struct caml_thread_struct { struct caml_thread_struct {
value descr; /* The heap-allocated descriptor (root) */ value descr; /* The heap-allocated descriptor (root) */
struct caml_thread_struct * next; /* Double linking of running threads */ struct caml_thread_struct * next; /* Double linking of running threads */
struct caml_thread_struct * prev; struct caml_thread_struct * prev;
#ifdef NATIVE_CODE #ifdef NATIVE_CODE
char * top_of_stack; /* Top of stack for this thread (approx.) */ char * top_of_stack; /* Top of stack for this thread (approx.) */
char * bottom_of_stack; /* Saved value of caml_bottom_of_stack */ char * bottom_of_stack; /* Saved value of Caml_state->bottom_of_stack */
uintnat last_retaddr; /* Saved value of caml_last_return_address */ uintnat last_retaddr; /* Saved value of Caml_state->last_return_address */
value * gc_regs; /* Saved value of caml_gc_regs */ value * gc_regs; /* Saved value of Caml_state->gc_regs */
char * exception_pointer; /* Saved value of caml_exception_pointer */ char * exception_pointer; /* Saved value of Caml_state->exception_pointer */
struct caml__roots_block * local_roots; /* Saved value of local_roots */ struct caml__roots_block * local_roots; /* Saved value of local_roots */
struct longjmp_buffer * exit_buf; /* For thread exit */ struct longjmp_buffer * exit_buf; /* For thread exit */
#if defined(NATIVE_CODE) && defined(WITH_SPACETIME) #if defined(NATIVE_CODE) && defined(WITH_SPACETIME)
@ -88,17 +89,18 @@ struct caml_thread_struct {
value* spacetime_finaliser_trie_root; value* spacetime_finaliser_trie_root;
#endif #endif
#else #else
value * stack_low; /* The execution stack for this thread */ value * stack_low; /* The execution stack for this thread */
value * stack_high; value * stack_high;
value * stack_threshold; value * stack_threshold;
value * sp; /* Saved value of caml_extern_sp for this thread */ value * sp; /* Saved value of Caml_state->extern_sp for this thread */
value * trapsp; /* Saved value of caml_trapsp for this thread */ value * trapsp; /* Saved value of Caml_state->trapsp for this thread */
struct caml__roots_block * local_roots; /* Saved value of caml_local_roots */ /* Saved value of Caml_state->local_roots */
struct longjmp_buffer * external_raise; /* Saved caml_external_raise */ struct caml__roots_block * local_roots;
struct longjmp_buffer * external_raise; /* Saved Caml_state->external_raise */
#endif #endif
int backtrace_pos; /* Saved caml_backtrace_pos */ int backtrace_pos; /* Saved Caml_state->backtrace_pos */
backtrace_slot * backtrace_buffer; /* Saved caml_backtrace_buffer */ backtrace_slot * backtrace_buffer; /* Saved Caml_state->backtrace_buffer */
value backtrace_last_exn; /* Saved caml_backtrace_last_exn (root) */ value backtrace_last_exn; /* Saved Caml_state->backtrace_last_exn (root) */
int memprof_suspended; /* Saved caml_memprof_suspended */ int memprof_suspended; /* Saved caml_memprof_suspended */
}; };
@ -173,12 +175,11 @@ static void caml_thread_scan_roots(scanning_action action)
static inline void caml_thread_save_runtime_state(void) static inline void caml_thread_save_runtime_state(void)
{ {
#ifdef NATIVE_CODE #ifdef NATIVE_CODE
curr_thread->top_of_stack = caml_top_of_stack; curr_thread->top_of_stack = Caml_state->top_of_stack;
curr_thread->bottom_of_stack = caml_bottom_of_stack; curr_thread->bottom_of_stack = Caml_state->bottom_of_stack;
curr_thread->last_retaddr = caml_last_return_address; curr_thread->last_retaddr = Caml_state->last_return_address;
curr_thread->gc_regs = caml_gc_regs; curr_thread->gc_regs = Caml_state->gc_regs;
curr_thread->exception_pointer = caml_exception_pointer; curr_thread->exception_pointer = Caml_state->exception_pointer;
curr_thread->local_roots = caml_local_roots;
#ifdef WITH_SPACETIME #ifdef WITH_SPACETIME
curr_thread->spacetime_trie_node_ptr curr_thread->spacetime_trie_node_ptr
= caml_spacetime_trie_node_ptr; = caml_spacetime_trie_node_ptr;
@ -186,29 +187,28 @@ static inline void caml_thread_save_runtime_state(void)
= caml_spacetime_finaliser_trie_root; = caml_spacetime_finaliser_trie_root;
#endif #endif
#else #else
curr_thread->stack_low = caml_stack_low; curr_thread->stack_low = Caml_state->stack_low;
curr_thread->stack_high = caml_stack_high; curr_thread->stack_high = Caml_state->stack_high;
curr_thread->stack_threshold = caml_stack_threshold; curr_thread->stack_threshold = Caml_state->stack_threshold;
curr_thread->sp = caml_extern_sp; curr_thread->sp = Caml_state->extern_sp;
curr_thread->trapsp = caml_trapsp; curr_thread->trapsp = Caml_state->trapsp;
curr_thread->local_roots = caml_local_roots; curr_thread->external_raise = Caml_state->external_raise;
curr_thread->external_raise = caml_external_raise;
#endif #endif
curr_thread->backtrace_pos = caml_backtrace_pos; curr_thread->local_roots = Caml_state->local_roots;
curr_thread->backtrace_buffer = caml_backtrace_buffer; curr_thread->backtrace_pos = Caml_state->backtrace_pos;
curr_thread->backtrace_last_exn = caml_backtrace_last_exn; curr_thread->backtrace_buffer = Caml_state->backtrace_buffer;
curr_thread->backtrace_last_exn = Caml_state->backtrace_last_exn;
curr_thread->memprof_suspended = caml_memprof_suspended; curr_thread->memprof_suspended = caml_memprof_suspended;
} }
static inline void caml_thread_restore_runtime_state(void) static inline void caml_thread_restore_runtime_state(void)
{ {
#ifdef NATIVE_CODE #ifdef NATIVE_CODE
caml_top_of_stack = curr_thread->top_of_stack; Caml_state->top_of_stack = curr_thread->top_of_stack;
caml_bottom_of_stack= curr_thread->bottom_of_stack; Caml_state->bottom_of_stack= curr_thread->bottom_of_stack;
caml_last_return_address = curr_thread->last_retaddr; Caml_state->last_return_address = curr_thread->last_retaddr;
caml_gc_regs = curr_thread->gc_regs; Caml_state->gc_regs = curr_thread->gc_regs;
caml_exception_pointer = curr_thread->exception_pointer; Caml_state->exception_pointer = curr_thread->exception_pointer;
caml_local_roots = curr_thread->local_roots;
#ifdef WITH_SPACETIME #ifdef WITH_SPACETIME
caml_spacetime_trie_node_ptr caml_spacetime_trie_node_ptr
= curr_thread->spacetime_trie_node_ptr; = curr_thread->spacetime_trie_node_ptr;
@ -216,17 +216,17 @@ static inline void caml_thread_restore_runtime_state(void)
= curr_thread->spacetime_finaliser_trie_root; = curr_thread->spacetime_finaliser_trie_root;
#endif #endif
#else #else
caml_stack_low = curr_thread->stack_low; Caml_state->stack_low = curr_thread->stack_low;
caml_stack_high = curr_thread->stack_high; Caml_state->stack_high = curr_thread->stack_high;
caml_stack_threshold = curr_thread->stack_threshold; Caml_state->stack_threshold = curr_thread->stack_threshold;
caml_extern_sp = curr_thread->sp; Caml_state->extern_sp = curr_thread->sp;
caml_trapsp = curr_thread->trapsp; Caml_state->trapsp = curr_thread->trapsp;
caml_local_roots = curr_thread->local_roots; Caml_state->external_raise = curr_thread->external_raise;
caml_external_raise = curr_thread->external_raise;
#endif #endif
caml_backtrace_pos = curr_thread->backtrace_pos; Caml_state->local_roots = curr_thread->local_roots;
caml_backtrace_buffer = curr_thread->backtrace_buffer; Caml_state->backtrace_pos = curr_thread->backtrace_pos;
caml_backtrace_last_exn = curr_thread->backtrace_last_exn; Caml_state->backtrace_buffer = curr_thread->backtrace_buffer;
Caml_state->backtrace_last_exn = curr_thread->backtrace_last_exn;
caml_memprof_suspended = curr_thread->memprof_suspended; caml_memprof_suspended = curr_thread->memprof_suspended;
} }
@ -701,7 +701,7 @@ CAMLprim value caml_thread_uncaught_exception(value exn) /* ML */
fprintf(stderr, "Thread %d killed on uncaught exception %s\n", fprintf(stderr, "Thread %d killed on uncaught exception %s\n",
Int_val(Ident(curr_thread->descr)), msg); Int_val(Ident(curr_thread->descr)), msg);
caml_stat_free(msg); caml_stat_free(msg);
if (caml_backtrace_active) caml_print_exception_backtrace(); if (Caml_state->backtrace_active) caml_print_exception_backtrace();
fflush(stderr); fflush(stderr);
return Val_unit; return Val_unit;
} }
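
The systhreads changes above illustrate the migration every C consumer of the runtime goes through: reads and writes of former globals such as caml_backtrace_pos become accesses to fields of Caml_state. A hypothetical out-of-tree stub updated the same way is sketched below; the header name and the remark about compatibility macros are assumptions based on the Changes entry, not part of this diff.

```c
#include <caml/mlvalues.h>
#include <caml/domain_state.h>   /* assumed header name for Caml_state */

CAMLprim value my_reset_backtrace(value unit)   /* hypothetical stub */
{
  (void) unit;
  /* before this PR:  caml_backtrace_pos = 0;
     after:           go through the per-domain state instead */
  Caml_state->backtrace_pos = 0;
  return Val_unit;
}
```

According to the Changes entry, a compatibility header can keep the old global names working for existing code by mapping them onto the corresponding Caml_state fields.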

File diff suppressed because it is too large.

runtime/Makefile

@ -26,7 +26,7 @@ BYTECODE_C_SOURCES := $(addsuffix .c, \
signals_byt printexc backtrace_byt backtrace compare ints \ signals_byt printexc backtrace_byt backtrace compare ints \
floats str array io extern intern hash sys meta parsing gc_ctrl md5 obj \ floats str array io extern intern hash sys meta parsing gc_ctrl md5 obj \
lexing callback debugger weak compact finalise custom dynlink \ lexing callback debugger weak compact finalise custom dynlink \
spacetime_byt afl $(UNIX_OR_WIN32) bigarray main memprof) spacetime_byt afl $(UNIX_OR_WIN32) bigarray main memprof domain)
NATIVE_C_SOURCES := $(addsuffix .c, \ NATIVE_C_SOURCES := $(addsuffix .c, \
startup_aux startup_nat main fail_nat roots_nat signals \ startup_aux startup_nat main fail_nat roots_nat signals \
@ -35,7 +35,7 @@ NATIVE_C_SOURCES := $(addsuffix .c, \
lexing $(UNIX_OR_WIN32) printexc callback weak compact finalise custom \ lexing $(UNIX_OR_WIN32) printexc callback weak compact finalise custom \
globroots backtrace_nat backtrace dynlink_nat debugger meta \ globroots backtrace_nat backtrace dynlink_nat debugger meta \
dynlink clambda_checks spacetime_nat spacetime_snapshot afl bigarray \ dynlink clambda_checks spacetime_nat spacetime_snapshot afl bigarray \
memprof) memprof domain)
# The other_files variable stores the list of files whose dependencies # The other_files variable stores the list of files whose dependencies
# should be computed by `make depend` although they do not need to be # should be computed by `make depend` although they do not need to be
@ -190,7 +190,7 @@ ifneq "$(BYTECODE_SHARED_LIBRARIES)" ""
$(INSTALL_PROG) $(BYTECODE_SHARED_LIBRARIES) "$(INSTALL_LIBDIR)" $(INSTALL_PROG) $(BYTECODE_SHARED_LIBRARIES) "$(INSTALL_LIBDIR)"
endif endif
mkdir -p "$(INSTALL_INCDIR)" mkdir -p "$(INSTALL_INCDIR)"
$(INSTALL_DATA) caml/*.h "$(INSTALL_INCDIR)" $(INSTALL_DATA) caml/domain_state.tbl caml/*.h "$(INSTALL_INCDIR)"
.PHONY: installopt .PHONY: installopt
installopt: installopt:
@ -203,7 +203,7 @@ endif
clean: clean:
rm -f $(PROGRAMS) *.$(O) *.$(A) *.$(SO) ld.conf rm -f $(PROGRAMS) *.$(O) *.$(A) *.$(SO) ld.conf
rm -f primitives prims.c caml/opnames.h caml/jumptbl.h rm -f primitives prims.c caml/opnames.h caml/jumptbl.h
rm -f caml/version.h rm -f caml/version.h domain_state*.inc
.PHONY: distclean .PHONY: distclean
distclean: clean distclean: clean
@ -358,7 +358,16 @@ $(foreach object_type,$(subst %,,$(object_types)), \
%_libasmrunpic.o: %.S %_libasmrunpic.o: %.S
$(ASPP) $(ASPPFLAGS) $(SHAREDLIB_CFLAGS) -o $@ $< $(ASPP) $(ASPPFLAGS) $(SHAREDLIB_CFLAGS) -o $@ $<
%.obj: %.asm domain_state64.inc: caml/domain_state.tbl gen_domain_state64_inc.awk
awk -f gen_domain_state64_inc.awk $< > $@
domain_state32.inc: caml/domain_state.tbl gen_domain_state32_inc.awk
awk -f gen_domain_state32_inc.awk $< > $@
amd64nt.obj: amd64nt.asm domain_state64.inc
$(ASM)$@ $(ASMFLAGS) $<
i386nt.obj: i386nt.asm domain_state32.inc
$(ASM)$@ $(ASMFLAGS) $< $(ASM)$@ $(ASMFLAGS) $<
%_libasmrunpic.obj: %.asm %_libasmrunpic.obj: %.asm

runtime/amd64.S

@ -112,6 +112,15 @@
#endif #endif
.set domain_curr_field, 0
#define DOMAIN_STATE(c_type, name) \
.equ domain_field_caml_##name, domain_curr_field ; \
.set domain_curr_field, domain_curr_field + 1
#include "../runtime/caml/domain_state.tbl"
#undef DOMAIN_STATE
#define Caml_state(var) (8*domain_field_caml_##var)(%r14)
#if defined(__PIC__) && !defined(SYS_mingw64) && !defined(SYS_cygwin) #if defined(__PIC__) && !defined(SYS_mingw64) && !defined(SYS_cygwin)
/* Position-independent operations on global variables. */ /* Position-independent operations on global variables. */
@ -154,9 +163,9 @@
#define RECORD_STACK_FRAME(OFFSET) \ #define RECORD_STACK_FRAME(OFFSET) \
pushq %r11 ; CFI_ADJUST(8); \ pushq %r11 ; CFI_ADJUST(8); \
movq 8+OFFSET(%rsp), %rax ; \ movq 8+OFFSET(%rsp), %rax ; \
STORE_VAR(%rax,caml_last_return_address) ; \ movq %rax, Caml_state(last_return_address) ; \
leaq 16+OFFSET(%rsp), %rax ; \ leaq 16+OFFSET(%rsp), %rax ; \
STORE_VAR(%rax,caml_bottom_of_stack) ; \ movq %rax, Caml_state(bottom_of_stack) ; \
popq %r11; CFI_ADJUST(-8) popq %r11; CFI_ADJUST(-8)
/* Load address of global [label] in register [dst]. */ /* Load address of global [label] in register [dst]. */
@ -190,9 +199,9 @@
#define RECORD_STACK_FRAME(OFFSET) \ #define RECORD_STACK_FRAME(OFFSET) \
movq OFFSET(%rsp), %rax ; \ movq OFFSET(%rsp), %rax ; \
STORE_VAR(%rax,caml_last_return_address) ; \ movq %rax, Caml_state(last_return_address) ; \
leaq 8+OFFSET(%rsp), %rax ; \ leaq 8+OFFSET(%rsp), %rax ; \
STORE_VAR(%rax,caml_bottom_of_stack) movq %rax, Caml_state(bottom_of_stack)
#define LEA_VAR(label,dst) \ #define LEA_VAR(label,dst) \
leaq G(label)(%rip), dst leaq G(label)(%rip), dst
@ -326,7 +335,7 @@ LBL(caml_call_gc):
subq $(STACK_PROBE_SIZE), %rsp; CFI_ADJUST(STACK_PROBE_SIZE); subq $(STACK_PROBE_SIZE), %rsp; CFI_ADJUST(STACK_PROBE_SIZE);
movq %rax, 0(%rsp) movq %rax, 0(%rsp)
addq $(STACK_PROBE_SIZE), %rsp; CFI_ADJUST(-STACK_PROBE_SIZE); addq $(STACK_PROBE_SIZE), %rsp; CFI_ADJUST(-STACK_PROBE_SIZE);
/* Build array of registers, save it into caml_gc_regs */ /* Build array of registers, save it into Caml_state->gc_regs */
#ifdef WITH_FRAME_POINTERS #ifdef WITH_FRAME_POINTERS
ENTER_FUNCTION ; ENTER_FUNCTION ;
#else #else
@ -344,10 +353,9 @@ LBL(caml_call_gc):
pushq %rdi; CFI_ADJUST (8); pushq %rdi; CFI_ADJUST (8);
pushq %rbx; CFI_ADJUST (8); pushq %rbx; CFI_ADJUST (8);
pushq %rax; CFI_ADJUST (8); pushq %rax; CFI_ADJUST (8);
STORE_VAR(%rsp, caml_gc_regs) movq %rsp, Caml_state(gc_regs)
/* Save caml_young_ptr, caml_exception_pointer */ /* Save young_ptr */
STORE_VAR(%r15, caml_young_ptr) movq %r15, Caml_state(young_ptr)
STORE_VAR(%r14, caml_exception_pointer)
#ifdef WITH_SPACETIME #ifdef WITH_SPACETIME
STORE_VAR(%r13, caml_spacetime_trie_node_ptr) STORE_VAR(%r13, caml_spacetime_trie_node_ptr)
#endif #endif
@ -373,9 +381,8 @@ LBL(caml_call_gc):
PREPARE_FOR_C_CALL PREPARE_FOR_C_CALL
call GCALL(caml_garbage_collection) call GCALL(caml_garbage_collection)
CLEANUP_AFTER_C_CALL CLEANUP_AFTER_C_CALL
/* Restore caml_young_ptr, caml_exception_pointer */ /* Restore young_ptr */
LOAD_VAR(caml_young_ptr, %r15) movq Caml_state(young_ptr), %r15
LOAD_VAR(caml_exception_pointer, %r14)
/* Restore all regs used by the code generator */ /* Restore all regs used by the code generator */
movsd 0*8(%rsp), %xmm0 movsd 0*8(%rsp), %xmm0
movsd 1*8(%rsp), %xmm1 movsd 1*8(%rsp), %xmm1
@ -420,7 +427,7 @@ FUNCTION(G(caml_alloc1))
CFI_STARTPROC CFI_STARTPROC
LBL(caml_alloc1): LBL(caml_alloc1):
subq $16, %r15 subq $16, %r15
CMP_VAR(caml_young_limit, %r15) cmpq Caml_state(young_limit), %r15
jb LBL(100) jb LBL(100)
ret ret
LBL(100): LBL(100):
@ -439,7 +446,7 @@ FUNCTION(G(caml_alloc2))
CFI_STARTPROC CFI_STARTPROC
LBL(caml_alloc2): LBL(caml_alloc2):
subq $24, %r15 subq $24, %r15
CMP_VAR(caml_young_limit, %r15) cmpq Caml_state(young_limit), %r15
jb LBL(101) jb LBL(101)
ret ret
LBL(101): LBL(101):
@ -458,7 +465,7 @@ FUNCTION(G(caml_alloc3))
CFI_STARTPROC CFI_STARTPROC
LBL(caml_alloc3): LBL(caml_alloc3):
subq $32, %r15 subq $32, %r15
CMP_VAR(caml_young_limit, %r15) cmpq Caml_state(young_limit), %r15
jb LBL(102) jb LBL(102)
ret ret
LBL(102): LBL(102):
@ -478,7 +485,7 @@ CFI_STARTPROC
LBL(caml_allocN): LBL(caml_allocN):
pushq %rax; CFI_ADJUST(8) /* save desired size */ pushq %rax; CFI_ADJUST(8) /* save desired size */
subq %rax, %r15 subq %rax, %r15
CMP_VAR(caml_young_limit, %r15) cmpq Caml_state(young_limit), %r15
jb LBL(103) jb LBL(103)
addq $8, %rsp; CFI_ADJUST (-8) /* drop desired size */ addq $8, %rsp; CFI_ADJUST (-8) /* drop desired size */
ret ret
@ -529,23 +536,22 @@ FUNCTION(G(caml_c_call))
CFI_STARTPROC CFI_STARTPROC
LBL(caml_c_call): LBL(caml_c_call):
/* Record lowest stack address and return address */ /* Record lowest stack address and return address */
popq %r12; CFI_ADJUST(-8) popq Caml_state(last_return_address); CFI_ADJUST(-8)
STORE_VAR(%r12, caml_last_return_address) movq %rsp, Caml_state(bottom_of_stack)
STORE_VAR(%rsp, caml_bottom_of_stack) /* equivalent to pushing last return address */
subq $8, %rsp; CFI_ADJUST(8)
#ifdef WITH_SPACETIME #ifdef WITH_SPACETIME
/* Record the trie node hole pointer that corresponds to /* Record the trie node hole pointer that corresponds to
[caml_last_return_address] */ [Caml_state->last_return_address] */
STORE_VAR(%r13, caml_spacetime_trie_node_ptr) STORE_VAR(%r13, caml_spacetime_trie_node_ptr)
#endif #endif
subq $8, %rsp; CFI_ADJUST(8) /* equivalent to pushq %r12 */
/* Touch the stack to trigger a recoverable segfault /* Touch the stack to trigger a recoverable segfault
if insufficient space remains */ if insufficient space remains */
subq $(STACK_PROBE_SIZE), %rsp; CFI_ADJUST(STACK_PROBE_SIZE); subq $(STACK_PROBE_SIZE), %rsp; CFI_ADJUST(STACK_PROBE_SIZE);
movq %rax, 0(%rsp) movq %rax, 0(%rsp)
addq $(STACK_PROBE_SIZE), %rsp; CFI_ADJUST(-STACK_PROBE_SIZE); addq $(STACK_PROBE_SIZE), %rsp; CFI_ADJUST(-STACK_PROBE_SIZE);
/* Make the exception handler and alloc ptr available to the C code */ /* Make the alloc ptr available to the C code */
STORE_VAR(%r15, caml_young_ptr) movq %r15, Caml_state(young_ptr)
STORE_VAR(%r14, caml_exception_pointer)
/* Call the function (address in %rax) */ /* Call the function (address in %rax) */
/* No need to PREPARE_FOR_C_CALL since the caller already /* No need to PREPARE_FOR_C_CALL since the caller already
reserved the stack space if needed (cf. amd64/proc.ml) */ reserved the stack space if needed (cf. amd64/proc.ml) */
@ -559,6 +565,8 @@ FUNCTION(G(caml_start_program))
CFI_STARTPROC CFI_STARTPROC
/* Save callee-save registers */ /* Save callee-save registers */
PUSH_CALLEE_SAVE_REGS PUSH_CALLEE_SAVE_REGS
/* Load Caml_state into r14 (was passed as an argument from C) */
movq C_ARG_1, %r14
/* Initial entry point is G(caml_program) */ /* Initial entry point is G(caml_program) */
LEA_VAR(caml_program, %r12) LEA_VAR(caml_program, %r12)
/* Common code for caml_start_program and caml_callback* */ /* Common code for caml_start_program and caml_callback* */
@ -569,9 +577,9 @@ LBL(caml_start_program):
#else #else
subq $8, %rsp; CFI_ADJUST (8) /* stack 16-aligned */ subq $8, %rsp; CFI_ADJUST (8) /* stack 16-aligned */
#endif #endif
PUSH_VAR(caml_gc_regs) pushq Caml_state(gc_regs); CFI_ADJUST(8)
PUSH_VAR(caml_last_return_address) pushq Caml_state(last_return_address); CFI_ADJUST(8)
PUSH_VAR(caml_bottom_of_stack) pushq Caml_state(bottom_of_stack); CFI_ADJUST(8)
#ifdef WITH_SPACETIME #ifdef WITH_SPACETIME
/* Save arguments to caml_callback* */ /* Save arguments to caml_callback* */
pushq %rax; CFI_ADJUST (8) pushq %rax; CFI_ADJUST (8)
@ -587,14 +595,13 @@ LBL(caml_start_program):
popq %rbx; CFI_ADJUST (-8) popq %rbx; CFI_ADJUST (-8)
popq %rax; CFI_ADJUST (-8) popq %rax; CFI_ADJUST (-8)
#endif #endif
/* Setup alloc ptr and exception ptr */ /* Setup alloc ptr */
LOAD_VAR(caml_young_ptr, %r15) movq Caml_state(young_ptr), %r15
LOAD_VAR(caml_exception_pointer, %r14)
/* Build an exception handler */ /* Build an exception handler */
lea LBL(108)(%rip), %r13 lea LBL(108)(%rip), %r13
pushq %r13; CFI_ADJUST(8) pushq %r13; CFI_ADJUST(8)
pushq %r14; CFI_ADJUST(8) pushq Caml_state(exception_pointer); CFI_ADJUST(8)
movq %rsp, %r14 movq %rsp, Caml_state(exception_pointer)
#ifdef WITH_SPACETIME #ifdef WITH_SPACETIME
LOAD_VAR(caml_spacetime_trie_node_ptr, %r13) LOAD_VAR(caml_spacetime_trie_node_ptr, %r13)
#endif #endif
@ -602,16 +609,15 @@ LBL(caml_start_program):
call *%r12 call *%r12
LBL(107): LBL(107):
/* Pop the exception handler */ /* Pop the exception handler */
popq %r14; CFI_ADJUST(-8) popq Caml_state(exception_pointer); CFI_ADJUST(-8)
popq %r12; CFI_ADJUST(-8) /* dummy register */ popq %r12; CFI_ADJUST(-8) /* dummy register */
LBL(109): LBL(109):
/* Update alloc ptr and exception ptr */ /* Update alloc ptr */
STORE_VAR(%r15,caml_young_ptr) movq %r15, Caml_state(young_ptr)
STORE_VAR(%r14,caml_exception_pointer)
/* Pop the callback link, restoring the global variables */ /* Pop the callback link, restoring the global variables */
POP_VAR(caml_bottom_of_stack) popq Caml_state(bottom_of_stack); CFI_ADJUST(-8)
POP_VAR(caml_last_return_address) popq Caml_state(last_return_address); CFI_ADJUST(-8)
POP_VAR(caml_gc_regs) popq Caml_state(gc_regs); CFI_ADJUST(-8)
#ifdef WITH_SPACETIME #ifdef WITH_SPACETIME
POP_VAR(caml_spacetime_trie_node_ptr) POP_VAR(caml_spacetime_trie_node_ptr)
#else #else
@ -633,10 +639,10 @@ ENDFUNCTION(G(caml_start_program))
FUNCTION(G(caml_raise_exn)) FUNCTION(G(caml_raise_exn))
CFI_STARTPROC CFI_STARTPROC
TESTL_VAR($1, caml_backtrace_active) testq $1, Caml_state(backtrace_active)
jne LBL(110) jne LBL(110)
movq %r14, %rsp movq Caml_state(exception_pointer), %rsp
popq %r14 popq Caml_state(exception_pointer); CFI_ADJUST(-8)
ret ret
LBL(110): LBL(110):
movq %rax, %r12 /* Save exception bucket */ movq %rax, %r12 /* Save exception bucket */
@ -649,14 +655,15 @@ LBL(110):
popq C_ARG_2 /* arg 2: pc of raise */ popq C_ARG_2 /* arg 2: pc of raise */
movq %rsp, C_ARG_3 /* arg 3: sp at raise */ movq %rsp, C_ARG_3 /* arg 3: sp at raise */
#endif #endif
movq %r14, C_ARG_4 /* arg 4: sp of handler */ /* arg 4: sp of handler */
movq Caml_state(exception_pointer), C_ARG_4
/* PR#5700: thanks to popq above, stack is now 16-aligned */ /* PR#5700: thanks to popq above, stack is now 16-aligned */
/* Thanks to ENTER_FUNCTION, stack is now 16-aligned */ /* Thanks to ENTER_FUNCTION, stack is now 16-aligned */
PREPARE_FOR_C_CALL /* no need to cleanup after */ PREPARE_FOR_C_CALL /* no need to cleanup after */
call GCALL(caml_stash_backtrace) call GCALL(caml_stash_backtrace)
movq %r12, %rax /* Recover exception bucket */ movq %r12, %rax /* Recover exception bucket */
movq %r14, %rsp movq Caml_state(exception_pointer), %rsp
popq %r14 popq Caml_state(exception_pointer); CFI_ADJUST(-8)
ret ret
CFI_ENDPROC CFI_ENDPROC
ENDFUNCTION(G(caml_raise_exn)) ENDFUNCTION(G(caml_raise_exn))
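The fast path of caml_raise_exn relies on the trap-frame layout that caml_start_program pushes above: the saved previous exception pointer sits at the handler's stack address, with the handler's code address in the next slot. A sketch of that inferred layout (the runtime defines no such struct; this is only a reading aid):

    /* Inferred from the pushes in caml_start_program and the pops in caml_raise_exn. */
    struct trap_frame {
      struct trap_frame *previous;  /* reloaded into Caml_state->exception_pointer */
      void *handler_pc;             /* target of the final `ret` */
    };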
@ -665,31 +672,39 @@ ENDFUNCTION(G(caml_raise_exn))
FUNCTION(G(caml_raise_exception)) FUNCTION(G(caml_raise_exception))
CFI_STARTPROC CFI_STARTPROC
TESTL_VAR($1, caml_backtrace_active) movq C_ARG_1, %r14 /* Caml_state */
testq $1, Caml_state(backtrace_active)
jne LBL(112) jne LBL(112)
movq C_ARG_1, %rax movq C_ARG_2, %rax
LOAD_VAR(caml_exception_pointer, %rsp) /* Cut stack */ movq Caml_state(exception_pointer), %rsp /* Cut stack */
popq %r14 /* Recover previous exception handler */ /* Recover previous exception handler */
LOAD_VAR(caml_young_ptr, %r15) /* Reload alloc ptr */ popq Caml_state(exception_pointer); CFI_ADJUST(-8)
movq Caml_state(young_ptr), %r15 /* Reload alloc ptr */
ret ret
LBL(112): LBL(112):
#ifdef WITH_FRAME_POINTERS #ifdef WITH_FRAME_POINTERS
ENTER_FUNCTION ; ENTER_FUNCTION ;
#endif #endif
movq C_ARG_1, %r12 /* Save exception bucket */ /* Save exception bucket. Caml_state in r14 saved across C calls. */
/* arg 1: exception bucket */ movq C_ARG_2, %r12
LOAD_VAR(caml_last_return_address,C_ARG_2) /* arg 2: pc of raise */ /* arg 1: exception bucket */
LOAD_VAR(caml_bottom_of_stack,C_ARG_3) /* arg 3: sp of raise */ movq C_ARG_2, C_ARG_1
LOAD_VAR(caml_exception_pointer,C_ARG_4) /* arg 4: sp of handler */ /* arg 2: pc of raise */
movq Caml_state(last_return_address), C_ARG_2
/* arg 3: sp of raise */
movq Caml_state(bottom_of_stack), C_ARG_3
/* arg 4: sp of handler */
movq Caml_state(exception_pointer), C_ARG_4
#ifndef WITH_FRAME_POINTERS #ifndef WITH_FRAME_POINTERS
subq $8, %rsp /* PR#5700: maintain stack alignment */ subq $8, %rsp /* PR#5700: maintain stack alignment */
#endif #endif
PREPARE_FOR_C_CALL /* no need to cleanup after */ PREPARE_FOR_C_CALL /* no need to cleanup after */
call GCALL(caml_stash_backtrace) call GCALL(caml_stash_backtrace)
movq %r12, %rax /* Recover exception bucket */ movq %r12, %rax /* Recover exception bucket */
LOAD_VAR(caml_exception_pointer,%rsp) movq Caml_state(exception_pointer), %rsp
popq %r14 /* Recover previous exception handler */ /* Recover previous exception handler */
LOAD_VAR(caml_young_ptr,%r15) /* Reload alloc ptr */ popq Caml_state(exception_pointer); CFI_ADJUST(-8)
movq Caml_state(young_ptr), %r15 /* Reload alloc ptr */
ret ret
CFI_ENDPROC CFI_ENDPROC
ENDFUNCTION(G(caml_raise_exception)) ENDFUNCTION(G(caml_raise_exception))
@ -701,52 +716,57 @@ ENDFUNCTION(G(caml_raise_exception))
backtrace anyway. */ backtrace anyway. */
FUNCTION(G(caml_stack_overflow)) FUNCTION(G(caml_stack_overflow))
movq C_ARG_1, %r14 /* Caml_state */
LEA_VAR(caml_exn_Stack_overflow, %rax) LEA_VAR(caml_exn_Stack_overflow, %rax)
movq %r14, %rsp /* cut the stack */ movq Caml_state(exception_pointer), %rsp /* cut the stack */
popq %r14 /* recover previous exn handler */ /* Recover previous exn handler */
ret /* jump to handler's code */ popq Caml_state(exception_pointer)
ret /* jump to handler's code */
ENDFUNCTION(G(caml_stack_overflow)) ENDFUNCTION(G(caml_stack_overflow))
/* Callback from C to OCaml */ /* Callback from C to OCaml */
FUNCTION(G(caml_callback_exn)) FUNCTION(G(caml_callback_asm))
CFI_STARTPROC CFI_STARTPROC
/* Save callee-save registers */ /* Save callee-save registers */
PUSH_CALLEE_SAVE_REGS PUSH_CALLEE_SAVE_REGS
/* Initial loading of arguments */ /* Initial loading of arguments */
movq C_ARG_1, %rbx /* closure */ movq C_ARG_1, %r14 /* Caml_state */
movq C_ARG_2, %rax /* argument */ movq C_ARG_2, %rbx /* closure */
movq 0(C_ARG_3), %rax /* argument */
movq 0(%rbx), %r12 /* code pointer */ movq 0(%rbx), %r12 /* code pointer */
jmp LBL(caml_start_program) jmp LBL(caml_start_program)
CFI_ENDPROC CFI_ENDPROC
ENDFUNCTION(G(caml_callback_exn)) ENDFUNCTION(G(caml_callback_asm))
FUNCTION(G(caml_callback2_exn)) FUNCTION(G(caml_callback2_asm))
CFI_STARTPROC CFI_STARTPROC
/* Save callee-save registers */ /* Save callee-save registers */
PUSH_CALLEE_SAVE_REGS PUSH_CALLEE_SAVE_REGS
/* Initial loading of arguments */ /* Initial loading of arguments */
movq C_ARG_1, %rdi /* closure -- no op with Unix conventions */ movq C_ARG_1, %r14 /* Caml_state */
movq C_ARG_2, %rax /* first argument */ movq C_ARG_2, %rdi /* closure */
movq C_ARG_3, %rbx /* second argument */ movq 0(C_ARG_3), %rax /* first argument */
movq 8(C_ARG_3), %rbx /* second argument */
LEA_VAR(caml_apply2, %r12) /* code pointer */ LEA_VAR(caml_apply2, %r12) /* code pointer */
jmp LBL(caml_start_program) jmp LBL(caml_start_program)
CFI_ENDPROC CFI_ENDPROC
ENDFUNCTION(G(caml_callback2_exn)) ENDFUNCTION(G(caml_callback2_asm))
FUNCTION(G(caml_callback3_exn)) FUNCTION(G(caml_callback3_asm))
CFI_STARTPROC CFI_STARTPROC
/* Save callee-save registers */ /* Save callee-save registers */
PUSH_CALLEE_SAVE_REGS PUSH_CALLEE_SAVE_REGS
/* Initial loading of arguments */ /* Initial loading of arguments */
movq C_ARG_2, %rax /* first argument */ movq C_ARG_1, %r14 /* Caml_state */
movq C_ARG_3, %rbx /* second argument */ movq 0(C_ARG_3), %rax /* first argument */
movq C_ARG_1, %rsi /* closure */ movq 8(C_ARG_3), %rbx /* second argument */
movq C_ARG_4, %rdi /* third argument */ movq C_ARG_2, %rsi /* closure */
movq 16(C_ARG_3), %rdi /* third argument */
LEA_VAR(caml_apply3, %r12) /* code pointer */ LEA_VAR(caml_apply3, %r12) /* code pointer */
jmp LBL(caml_start_program) jmp LBL(caml_start_program)
CFI_ENDPROC CFI_ENDPROC
ENDFUNCTION(G(caml_callback3_exn)) ENDFUNCTION(G(caml_callback3_asm))
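The callback entry points are renamed from *_exn to *_asm and their interface changes: the first C argument is now the domain state, and the OCaml arguments arrive through an array instead of separate registers. A hedged sketch of the matching C-side prototypes and of how a wrapper might invoke them (the exact declarations live in the runtime's callback code and are assumptions here):

    #include "caml/mlvalues.h"
    #include "caml/domain_state.h"   /* assumed to define caml_domain_state */

    /* Assumed prototypes; only the argument order shown above is certain. */
    extern value caml_callback_asm (caml_domain_state *, value closure, value args[]);
    extern value caml_callback2_asm(caml_domain_state *, value closure, value args[]);
    extern value caml_callback3_asm(caml_domain_state *, value closure, value args[]);

    /* Hypothetical wrapper illustrating a two-argument callback. */
    value sketch_callback2(value closure, value arg1, value arg2)
    {
      value args[2] = { arg1, arg2 };
      return caml_callback2_asm(Caml_state, closure, args);
    }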
FUNCTION(G(caml_ml_array_bound_error)) FUNCTION(G(caml_ml_array_bound_error))
CFI_STARTPROC CFI_STARTPROC
View File
@ -25,20 +25,14 @@
EXTRN caml_apply3: NEAR EXTRN caml_apply3: NEAR
EXTRN caml_program: NEAR EXTRN caml_program: NEAR
EXTRN caml_array_bound_error: NEAR EXTRN caml_array_bound_error: NEAR
EXTRN caml_young_limit: QWORD EXTRN caml_stash_backtrace: NEAR
EXTRN caml_young_ptr: QWORD
EXTRN caml_bottom_of_stack: QWORD
EXTRN caml_last_return_address: QWORD
EXTRN caml_gc_regs: QWORD
EXTRN caml_exception_pointer: QWORD
EXTRN caml_backtrace_pos: DWORD
EXTRN caml_backtrace_active: DWORD
EXTRN caml_stash_backtrace: NEAR
IFDEF WITH_SPACETIME IFDEF WITH_SPACETIME
EXTRN caml_spacetime_trie_node_ptr: QWORD EXTRN caml_spacetime_trie_node_ptr: QWORD
EXTRN caml_spacetime_c_to_ocaml: NEAR EXTRN caml_spacetime_c_to_ocaml: NEAR
ENDIF ENDIF
INCLUDE domain_state64.inc
.CODE .CODE
PUBLIC caml_system__code_begin PUBLIC caml_system__code_begin
@ -53,22 +47,21 @@ caml_system__code_begin:
caml_call_gc: caml_call_gc:
; Record lowest stack address and return address ; Record lowest stack address and return address
mov rax, [rsp] mov rax, [rsp]
mov caml_last_return_address, rax Store_last_return_address rax
lea rax, [rsp+8] lea rax, [rsp+8]
mov caml_bottom_of_stack, rax Store_bottom_of_stack rax
L105: L105:
; Touch the stack to trigger a recoverable segfault ; Touch the stack to trigger a recoverable segfault
; if insufficient space remains ; if insufficient space remains
sub rsp, 01000h sub rsp, 01000h
mov [rsp], rax mov [rsp], rax
add rsp, 01000h add rsp, 01000h
; Save caml_young_ptr, caml_exception_pointer ; Save young_ptr
mov caml_young_ptr, r15 Store_young_ptr r15
mov caml_exception_pointer, r14
IFDEF WITH_SPACETIME IFDEF WITH_SPACETIME
mov caml_spacetime_trie_node_ptr, r13 mov caml_spacetime_trie_node_ptr, r13
ENDIF ENDIF
; Build array of registers, save it into caml_gc_regs ; Build array of registers, save it into Caml_state(gc_regs)
push rbp push rbp
push r11 push r11
push r10 push r10
@ -82,7 +75,7 @@ ENDIF
push rdi push rdi
push rbx push rbx
push rax push rax
mov caml_gc_regs, rsp Store_gc_regs rsp
; Save floating-point registers ; Save floating-point registers
sub rsp, 16*8 sub rsp, 16*8
movsd QWORD PTR [rsp + 0*8], xmm0 movsd QWORD PTR [rsp + 0*8], xmm0
@ -136,9 +129,8 @@ ENDIF
pop r10 pop r10
pop r11 pop r11
pop rbp pop rbp
; Restore caml_young_ptr, caml_exception_pointer ; Restore Caml_state(young_ptr)
mov r15, caml_young_ptr Load_young_ptr r15
mov r14, caml_exception_pointer
; Return to caller ; Return to caller
ret ret
@ -146,15 +138,15 @@ ENDIF
ALIGN 16 ALIGN 16
caml_alloc1: caml_alloc1:
sub r15, 16 sub r15, 16
cmp r15, caml_young_limit Cmp_young_limit r15
jb L100 jb L100
ret ret
L100: L100:
add r15, 16 add r15, 16
mov rax, [rsp + 0] mov rax, [rsp + 0]
mov caml_last_return_address, rax Store_last_return_address rax
lea rax, [rsp + 8] lea rax, [rsp + 8]
mov caml_bottom_of_stack, rax Store_bottom_of_stack rax
sub rsp, 8 sub rsp, 8
call L105 call L105
add rsp, 8 add rsp, 8
@ -164,15 +156,15 @@ L100:
ALIGN 16 ALIGN 16
caml_alloc2: caml_alloc2:
sub r15, 24 sub r15, 24
cmp r15, caml_young_limit Cmp_young_limit r15
jb L101 jb L101
ret ret
L101: L101:
add r15, 24 add r15, 24
mov rax, [rsp + 0] mov rax, [rsp + 0]
mov caml_last_return_address, rax Store_last_return_address rax
lea rax, [rsp + 8] lea rax, [rsp + 8]
mov caml_bottom_of_stack, rax Store_bottom_of_stack rax
sub rsp, 8 sub rsp, 8
call L105 call L105
add rsp, 8 add rsp, 8
@ -182,15 +174,15 @@ L101:
ALIGN 16 ALIGN 16
caml_alloc3: caml_alloc3:
sub r15, 32 sub r15, 32
cmp r15, caml_young_limit Cmp_young_limit r15
jb L102 jb L102
ret ret
L102: L102:
add r15, 32 add r15, 32
mov rax, [rsp + 0] mov rax, [rsp + 0]
mov caml_last_return_address, rax Store_last_return_address rax
lea rax, [rsp + 8] lea rax, [rsp + 8]
mov caml_bottom_of_stack, rax Store_bottom_of_stack rax
sub rsp, 8 sub rsp, 8
call L105 call L105
add rsp, 8 add rsp, 8
@ -200,16 +192,16 @@ L102:
ALIGN 16 ALIGN 16
caml_allocN: caml_allocN:
sub r15, rax sub r15, rax
cmp r15, caml_young_limit Cmp_young_limit r15
jb L103 jb L103
ret ret
L103: L103:
add r15, rax add r15, rax
push rax ; save desired size push rax ; save desired size
mov rax, [rsp + 8] mov rax, [rsp + 8]
mov caml_last_return_address, rax Store_last_return_address rax
lea rax, [rsp + 16] lea rax, [rsp + 16]
mov caml_bottom_of_stack, rax Store_bottom_of_stack rax
call L105 call L105
pop rax ; recover desired size pop rax ; recover desired size
jmp caml_allocN jmp caml_allocN
@ -241,11 +233,11 @@ caml_call_gc3:
caml_c_call: caml_c_call:
; Record lowest stack address and return address ; Record lowest stack address and return address
pop r12 pop r12
mov caml_last_return_address, r12 Store_last_return_address r12
mov caml_bottom_of_stack, rsp Store_bottom_of_stack rsp
IFDEF WITH_SPACETIME IFDEF WITH_SPACETIME
; Record the trie node hole pointer that corresponds to ; Record the trie node hole pointer that corresponds to
; [caml_last_return_address] ; [Caml_state(last_return_address)]
mov caml_spacetime_trie_node_ptr, r13 mov caml_spacetime_trie_node_ptr, r13
ENDIF ENDIF
; Touch the stack to trigger a recoverable segfault ; Touch the stack to trigger a recoverable segfault
@ -253,13 +245,12 @@ ENDIF
sub rsp, 01000h sub rsp, 01000h
mov [rsp], rax mov [rsp], rax
add rsp, 01000h add rsp, 01000h
; Make the exception handler and alloc ptr available to the C code ; Make the alloc ptr available to the C code
mov caml_young_ptr, r15 Store_young_ptr r15
mov caml_exception_pointer, r14
; Call the function (address in rax) ; Call the function (address in rax)
call rax call rax
; Reload alloc ptr ; Reload alloc ptr
mov r15, caml_young_ptr Load_young_ptr r15
; Return to caller ; Return to caller
push r12 push r12
ret ret
@ -289,6 +280,8 @@ caml_start_program:
movapd OWORD PTR [rsp + 7*16], xmm13 movapd OWORD PTR [rsp + 7*16], xmm13
movapd OWORD PTR [rsp + 8*16], xmm14 movapd OWORD PTR [rsp + 8*16], xmm14
movapd OWORD PTR [rsp + 9*16], xmm15 movapd OWORD PTR [rsp + 9*16], xmm15
; First argument (rcx) is Caml_state. Load it in r14
mov r14, rcx
; Initial entry point is caml_program ; Initial entry point is caml_program
lea r12, caml_program lea r12, caml_program
; Common code for caml_start_program and caml_callback* ; Common code for caml_start_program and caml_callback*
@ -299,9 +292,9 @@ IFDEF WITH_SPACETIME
ELSE ELSE
sub rsp, 8 ; stack 16-aligned sub rsp, 8 ; stack 16-aligned
ENDIF ENDIF
push caml_gc_regs Push_gc_regs
push caml_last_return_address Push_last_return_address
push caml_bottom_of_stack Push_bottom_of_stack
IFDEF WITH_SPACETIME IFDEF WITH_SPACETIME
; Save arguments to caml_callback ; Save arguments to caml_callback
push rax push rax
@ -317,14 +310,13 @@ IFDEF WITH_SPACETIME
pop rbx pop rbx
pop rax pop rax
ENDIF ENDIF
; Setup alloc ptr and exception ptr ; Setup alloc ptr
mov r15, caml_young_ptr Load_young_ptr r15
mov r14, caml_exception_pointer
; Build an exception handler ; Build an exception handler
lea r13, L108 lea r13, L108
push r13 push r13
push r14 Push_exception_pointer
mov r14, rsp Store_exception_pointer rsp
IFDEF WITH_SPACETIME IFDEF WITH_SPACETIME
mov r13, caml_spacetime_trie_node_ptr mov r13, caml_spacetime_trie_node_ptr
ENDIF ENDIF
@ -332,16 +324,15 @@ ENDIF
call r12 call r12
L107: L107:
; Pop the exception handler ; Pop the exception handler
pop r14 Pop_exception_pointer
pop r12 ; dummy register pop r12 ; dummy register
L109: L109:
; Update alloc ptr and exception ptr ; Update alloc ptr
mov caml_young_ptr, r15 Store_young_ptr r15
mov caml_exception_pointer, r14
; Pop the callback link, restoring the global variables ; Pop the callback link, restoring the global variables
pop caml_bottom_of_stack Pop_bottom_of_stack
pop caml_last_return_address Pop_last_return_address
pop caml_gc_regs Pop_gc_regs
IFDEF WITH_SPACETIME IFDEF WITH_SPACETIME
pop caml_spacetime_trie_node_ptr pop caml_spacetime_trie_node_ptr
ELSE ELSE
@ -380,22 +371,25 @@ L108:
PUBLIC caml_raise_exn PUBLIC caml_raise_exn
ALIGN 16 ALIGN 16
caml_raise_exn: caml_raise_exn:
test caml_backtrace_active, 1 Load_backtrace_active r11
test r11, 1
jne L110 jne L110
mov rsp, r14 ; Cut stack Load_exception_pointer rsp ; Cut stack
pop r14 ; Recover previous exception handler ; Recover previous exception handler
ret ; Branch to handler Pop_exception_pointer
ret ; Branch to handler
L110: L110:
mov r12, rax ; Save exception bucket in r12 mov r12, rax ; Save exception bucket in r12
mov rcx, rax ; Arg 1: exception bucket mov rcx, rax ; Arg 1: exception bucket
mov rdx, [rsp] ; Arg 2: PC of raise mov rdx, [rsp] ; Arg 2: PC of raise
lea r8, [rsp+8] ; Arg 3: SP of raise lea r8, [rsp+8] ; Arg 3: SP of raise
mov r9, r14 ; Arg 4: SP of handler Load_exception_pointer r9 ; Arg 4: SP of handler
sub rsp, 32 ; Reserve 32 bytes on stack sub rsp, 32 ; Reserve 32 bytes on stack
call caml_stash_backtrace call caml_stash_backtrace
mov rax, r12 ; Recover exception bucket mov rax, r12 ; Recover exception bucket
mov rsp, r14 ; Cut stack Load_exception_pointer rsp ; Cut stack
pop r14 ; Recover previous exception handler ; Recover previous exception handler
Pop_exception_pointer
ret ; Branch to handler ret ; Branch to handler
; Raise an exception from C ; Raise an exception from C
@ -403,32 +397,36 @@ L110:
PUBLIC caml_raise_exception PUBLIC caml_raise_exception
ALIGN 16 ALIGN 16
caml_raise_exception: caml_raise_exception:
test caml_backtrace_active, 1 mov r14, rcx ; First argument is Caml_state
Load_backtrace_active r11
test r11, 1
jne L112 jne L112
mov rax, rcx ; First argument is exn bucket mov rax, rdx ; Second argument is exn bucket
mov rsp, caml_exception_pointer Load_exception_pointer rsp
pop r14 ; Recover previous exception handler ; Recover previous exception handler
mov r15, caml_young_ptr ; Reload alloc ptr Pop_exception_pointer
Load_young_ptr r15 ; Reload alloc ptr
ret ret
L112: L112:
mov r12, rcx ; Save exception bucket in r12 mov r12, rdx ; Save exception bucket in r12
; Arg 1: exception bucket mov rcx, rdx ; Arg 1: exception bucket
mov rdx, caml_last_return_address ; Arg 2: PC of raise Load_last_return_address rdx ; Arg 2: PC of raise
mov r8, caml_bottom_of_stack ; Arg 3: SP of raise Load_bottom_of_stack r8 ; Arg 3: SP of raise
mov r9, caml_exception_pointer ; Arg 4: SP of handler Load_exception_pointer r9 ; Arg 4: SP of handler
sub rsp, 32 ; Reserve 32 bytes on stack sub rsp, 32 ; Reserve 32 bytes on stack
call caml_stash_backtrace call caml_stash_backtrace
mov rax, r12 ; Recover exception bucket mov rax, r12 ; Recover exception bucket
mov rsp, caml_exception_pointer Load_exception_pointer rsp
pop r14 ; Recover previous exception handler ; Recover previous exception handler
mov r15, caml_young_ptr ; Reload alloc ptr Pop_exception_pointer
Load_young_ptr r15; Reload alloc ptr
ret ret
; Callback from C to OCaml ; Callback from C to OCaml
PUBLIC caml_callback_exn PUBLIC caml_callback_asm
ALIGN 16 ALIGN 16
caml_callback_exn: caml_callback_asm:
; Save callee-save registers ; Save callee-save registers
push rbx push rbx
push rbp push rbp
@ -450,14 +448,15 @@ caml_callback_exn:
movapd OWORD PTR [rsp + 8*16], xmm14 movapd OWORD PTR [rsp + 8*16], xmm14
movapd OWORD PTR [rsp + 9*16], xmm15 movapd OWORD PTR [rsp + 9*16], xmm15
; Initial loading of arguments ; Initial loading of arguments
mov rbx, rcx ; closure mov r14, rcx ; Caml_state
mov rax, rdx ; argument mov rbx, rdx ; closure
mov rax, [r8] ; argument
mov r12, [rbx] ; code pointer mov r12, [rbx] ; code pointer
jmp L106 jmp L106
PUBLIC caml_callback2_exn PUBLIC caml_callback2_asm
ALIGN 16 ALIGN 16
caml_callback2_exn: caml_callback2_asm:
; Save callee-save registers ; Save callee-save registers
push rbx push rbx
push rbp push rbp
@ -479,15 +478,16 @@ caml_callback2_exn:
movapd OWORD PTR [rsp + 8*16], xmm14 movapd OWORD PTR [rsp + 8*16], xmm14
movapd OWORD PTR [rsp + 9*16], xmm15 movapd OWORD PTR [rsp + 9*16], xmm15
; Initial loading of arguments ; Initial loading of arguments
mov rdi, rcx ; closure mov r14, rcx ; Caml_state
mov rax, rdx ; first argument mov rdi, rdx ; closure
mov rbx, r8 ; second argument mov rax, [r8] ; first argument
mov rbx, [r8 + 8] ; second argument
lea r12, caml_apply2 ; code pointer lea r12, caml_apply2 ; code pointer
jmp L106 jmp L106
PUBLIC caml_callback3_exn PUBLIC caml_callback3_asm
ALIGN 16 ALIGN 16
caml_callback3_exn: caml_callback3_asm:
; Save callee-save registers ; Save callee-save registers
push rbx push rbx
push rbp push rbp
@ -509,10 +509,11 @@ caml_callback3_exn:
movapd OWORD PTR [rsp + 8*16], xmm14 movapd OWORD PTR [rsp + 8*16], xmm14
movapd OWORD PTR [rsp + 9*16], xmm15 movapd OWORD PTR [rsp + 9*16], xmm15
; Initial loading of arguments ; Initial loading of arguments
mov rsi, rcx ; closure mov r14, rcx ; Caml_state
mov rax, rdx ; first argument mov rsi, rdx ; closure
mov rbx, r8 ; second argument mov rax, [r8] ; first argument
mov rdi, r9 ; third argument mov rbx, [r8 + 8] ; second argument
mov rdi, [r8 + 16] ; third argument
lea r12, caml_apply3 ; code pointer lea r12, caml_apply3 ; code pointer
jmp L106 jmp L106
View File
@ -79,9 +79,9 @@
.endm .endm
#endif #endif
trap_ptr .req r8 trap_ptr .req r8
alloc_ptr .req r10 alloc_ptr .req r10
alloc_limit .req r11 domain_state_ptr .req r11
/* Support for CFI directives */ /* Support for CFI directives */
@ -122,6 +122,15 @@ caml_hot__code_begin:
caml_hot__code_end: caml_hot__code_end:
#endif #endif
.set domain_curr_field, 0
#define DOMAIN_STATE(c_type, name) \
.equ domain_field_caml_##name, domain_curr_field ; \
.set domain_curr_field, domain_curr_field + 1
#include "../runtime/caml/domain_state.tbl"
#undef DOMAIN_STATE
#define Caml_state(var) [domain_state_ptr, 8*domain_field_caml_##var]
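The assembler block above and the C runtime are assumed to derive their layouts from the same domain_state.tbl, so field offsets stay in sync by construction: each table entry becomes an index here and a struct field on the C side, with every field assumed to occupy one 8-byte slot. A minimal sketch of the assumed C-side counterpart:

    /* Sketch of the pattern assumed in the runtime's domain-state header. */
    typedef struct {
    #define DOMAIN_STATE(c_type, name) c_type name;
    #include "caml/domain_state.tbl"   /* e.g. DOMAIN_STATE(value *, young_ptr) */
    #undef DOMAIN_STATE
    } caml_domain_state;

    /* Under that assumption, field i lives at offset 8*i, which is exactly
       what Caml_state(var) above computes on the assembly side. */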
/* Allocation functions and GC interface */ /* Allocation functions and GC interface */
.globl caml_system__code_begin .globl caml_system__code_begin
caml_system__code_begin: caml_system__code_begin:
@ -129,12 +138,10 @@ caml_system__code_begin:
FUNCTION(caml_call_gc) FUNCTION(caml_call_gc)
CFI_STARTPROC CFI_STARTPROC
/* Record return address */ /* Record return address */
ldr r12, =caml_last_return_address str lr, Caml_state(last_return_address)
str lr, [r12]
.Lcaml_call_gc: .Lcaml_call_gc:
/* Record lowest stack address */ /* Record lowest stack address */
ldr r12, =caml_bottom_of_stack str sp, Caml_state(bottom_of_stack)
str sp, [r12]
#if defined(SYS_linux_eabihf) || defined(SYS_netbsd) #if defined(SYS_linux_eabihf) || defined(SYS_netbsd)
/* Save caller floating-point registers on the stack */ /* Save caller floating-point registers on the stack */
vpush {d0-d7}; CFI_ADJUST(64) vpush {d0-d7}; CFI_ADJUST(64)
@ -147,14 +154,11 @@ FUNCTION(caml_call_gc)
CFI_OFFSET(lr, -4) CFI_OFFSET(lr, -4)
#endif #endif
/* Store pointer to saved integer registers in caml_gc_regs */ /* Store pointer to saved integer registers in caml_gc_regs */
ldr r12, =caml_gc_regs str sp, Caml_state(gc_regs)
str sp, [r12]
/* Save current allocation pointer for debugging purposes */ /* Save current allocation pointer for debugging purposes */
ldr alloc_limit, =caml_young_ptr str alloc_ptr, Caml_state(young_ptr)
str alloc_ptr, [alloc_limit]
/* Save trap pointer in case an exception is raised during GC */ /* Save trap pointer in case an exception is raised during GC */
ldr r12, =caml_exception_pointer str trap_ptr, Caml_state(exception_pointer)
str trap_ptr, [r12]
/* Call the garbage collector */ /* Call the garbage collector */
bl caml_garbage_collection bl caml_garbage_collection
/* Restore integer registers and return address from the stack */ /* Restore integer registers and return address from the stack */
@ -163,11 +167,8 @@ FUNCTION(caml_call_gc)
/* Restore floating-point registers from the stack */ /* Restore floating-point registers from the stack */
vpop {d0-d7}; CFI_ADJUST(-64) vpop {d0-d7}; CFI_ADJUST(-64)
#endif #endif
/* Reload new allocation pointer and limit */ /* Reload new allocation pointer */
/* alloc_limit still points to caml_young_ptr */ ldr alloc_ptr, Caml_state(young_ptr)
ldr r12, =caml_young_limit
ldr alloc_ptr, [alloc_limit]
ldr alloc_limit, [r12]
/* Return to caller */ /* Return to caller */
bx lr bx lr
CFI_ENDPROC CFI_ENDPROC
@ -177,17 +178,17 @@ FUNCTION(caml_alloc1)
CFI_STARTPROC CFI_STARTPROC
.Lcaml_alloc1: .Lcaml_alloc1:
sub alloc_ptr, alloc_ptr, 8 sub alloc_ptr, alloc_ptr, 8
cmp alloc_ptr, alloc_limit ldr r7, Caml_state(young_limit)
cmp alloc_ptr, r7
bcc 1f bcc 1f
bx lr bx lr
1: add alloc_ptr, alloc_ptr, 8 1: add alloc_ptr, alloc_ptr, 8
/* Record return address */ /* Record return address */
ldr r7, =caml_last_return_address str lr, Caml_state(last_return_address)
str lr, [r7] /* Call GC */
/* Call GC (preserves r7) */
bl .Lcaml_call_gc bl .Lcaml_call_gc
/* Restore return address */ /* Restore return address */
ldr lr, [r7] ldr lr, Caml_state(last_return_address)
/* Try again */ /* Try again */
b .Lcaml_alloc1 b .Lcaml_alloc1
CFI_ENDPROC CFI_ENDPROC
@ -197,17 +198,17 @@ FUNCTION(caml_alloc2)
CFI_STARTPROC CFI_STARTPROC
.Lcaml_alloc2: .Lcaml_alloc2:
sub alloc_ptr, alloc_ptr, 12 sub alloc_ptr, alloc_ptr, 12
cmp alloc_ptr, alloc_limit ldr r7, Caml_state(young_limit)
cmp alloc_ptr, r7
bcc 1f bcc 1f
bx lr bx lr
1: add alloc_ptr, alloc_ptr, 12 1: add alloc_ptr, alloc_ptr, 12
/* Record return address */ /* Record return address */
ldr r7, =caml_last_return_address str lr, Caml_state(last_return_address)
str lr, [r7] /* Call GC */
/* Call GC (preserves r7) */
bl .Lcaml_call_gc bl .Lcaml_call_gc
/* Restore return address */ /* Restore return address */
ldr lr, [r7] ldr lr, Caml_state(last_return_address)
/* Try again */ /* Try again */
b .Lcaml_alloc2 b .Lcaml_alloc2
CFI_ENDPROC CFI_ENDPROC
@ -217,17 +218,17 @@ FUNCTION(caml_alloc3)
CFI_STARTPROC CFI_STARTPROC
.Lcaml_alloc3: .Lcaml_alloc3:
sub alloc_ptr, alloc_ptr, 16 sub alloc_ptr, alloc_ptr, 16
cmp alloc_ptr, alloc_limit ldr r7, Caml_state(young_limit)
cmp alloc_ptr, r7
bcc 1f bcc 1f
bx lr bx lr
1: add alloc_ptr, alloc_ptr, 16 1: add alloc_ptr, alloc_ptr, 16
/* Record return address */ /* Record return address */
ldr r7, =caml_last_return_address str lr, Caml_state(last_return_address)
str lr, [r7] /* Call GC */
/* Call GC (preserves r7) */
bl .Lcaml_call_gc bl .Lcaml_call_gc
/* Restore return address */ /* Restore return address */
ldr lr, [r7] ldr lr, Caml_state(last_return_address)
/* Try again */ /* Try again */
b .Lcaml_alloc3 b .Lcaml_alloc3
CFI_ENDPROC CFI_ENDPROC
@ -237,18 +238,17 @@ FUNCTION(caml_allocN)
CFI_STARTPROC CFI_STARTPROC
.Lcaml_allocN: .Lcaml_allocN:
sub alloc_ptr, alloc_ptr, r7 sub alloc_ptr, alloc_ptr, r7
cmp alloc_ptr, alloc_limit ldr r12, Caml_state(young_limit)
cmp alloc_ptr, r12
bcc 1f bcc 1f
bx lr bx lr
1: add alloc_ptr, alloc_ptr, r7 1: add alloc_ptr, alloc_ptr, r7
/* Record return address */ /* Record return address */
ldr r12, =caml_last_return_address str lr, Caml_state(last_return_address)
str lr, [r12]
/* Call GC (preserves r7) */ /* Call GC (preserves r7) */
bl .Lcaml_call_gc bl .Lcaml_call_gc
/* Restore return address */ /* Restore return address */
ldr r12, =caml_last_return_address ldr lr, Caml_state(last_return_address)
ldr lr, [r12]
/* Try again */ /* Try again */
b .Lcaml_allocN b .Lcaml_allocN
CFI_ENDPROC CFI_ENDPROC
@ -260,24 +260,18 @@ FUNCTION(caml_allocN)
FUNCTION(caml_c_call) FUNCTION(caml_c_call)
CFI_STARTPROC CFI_STARTPROC
/* Record lowest stack address and return address */ /* Record lowest stack address and return address */
ldr r5, =caml_last_return_address str lr, Caml_state(last_return_address)
ldr r6, =caml_bottom_of_stack str sp, Caml_state(bottom_of_stack)
str lr, [r5]
str sp, [r6]
/* Preserve return address in callee-save register r4 */ /* Preserve return address in callee-save register r4 */
mov r4, lr mov r4, lr
CFI_REGISTER(lr, r4) CFI_REGISTER(lr, r4)
/* Make the exception handler and alloc ptr available to the C code */ /* Make the exception handler and alloc ptr available to the C code */
ldr r5, =caml_young_ptr str alloc_ptr, Caml_state(young_ptr)
ldr r6, =caml_exception_pointer str trap_ptr, Caml_state(exception_pointer)
str alloc_ptr, [r5]
str trap_ptr, [r6]
/* Call the function */ /* Call the function */
blx r7 blx r7
/* Reload alloc ptr and alloc limit */ /* Reload alloc ptr */
ldr r6, =caml_young_limit ldr alloc_ptr, Caml_state(young_ptr)
ldr alloc_ptr, [r5] /* r5 still points to caml_young_ptr */
ldr alloc_limit, [r6]
/* Return */ /* Return */
bx r4 bx r4
CFI_ENDPROC CFI_ENDPROC
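As on amd64, the ARM caml_c_call above publishes the register-held allocation and trap pointers into Caml_state before calling C and reloads the allocation pointer afterwards, because the callee may trigger a GC. A C rendering of that protocol, purely as a sketch (the real values live in registers, and the field types are assumptions):

    /* Hypothetical helper; not runtime code. */
    static void sketch_around_c_call(void (*fn)(void),
                                     void **alloc_ptr_reg, void **trap_ptr_reg)
    {
      Caml_state->young_ptr         = *alloc_ptr_reg;  /* publish alloc pointer */
      Caml_state->exception_pointer = *trap_ptr_reg;   /* publish handler chain */
      fn();                                            /* C code runs, may GC   */
      *alloc_ptr_reg = Caml_state->young_ptr;          /* may have been moved   */
    }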
@ -305,53 +299,43 @@ FUNCTION(caml_start_program)
#else #else
CFI_OFFSET(lr, -4) CFI_OFFSET(lr, -4)
#endif #endif
ldr domain_state_ptr, =Caml_state
ldr domain_state_ptr, [domain_state_ptr]
/* Setup a callback link on the stack */ /* Setup a callback link on the stack */
sub sp, sp, 16; CFI_ADJUST(16) /* 8-byte alignment */ sub sp, sp, 16; CFI_ADJUST(16) /* 8-byte alignment */
ldr r4, =caml_bottom_of_stack ldr r4, Caml_state(bottom_of_stack)
ldr r5, =caml_last_return_address ldr r5, Caml_state(last_return_address)
ldr r6, =caml_gc_regs ldr r6, Caml_state(gc_regs)
ldr r4, [r4]
ldr r5, [r5]
ldr r6, [r6]
str r4, [sp, 0] str r4, [sp, 0]
str r5, [sp, 4] str r5, [sp, 4]
str r6, [sp, 8] str r6, [sp, 8]
/* Setup a trap frame to catch exceptions escaping the OCaml code */ /* Setup a trap frame to catch exceptions escaping the OCaml code */
sub sp, sp, 8; CFI_ADJUST(8) sub sp, sp, 8; CFI_ADJUST(8)
ldr r6, =caml_exception_pointer
ldr r5, =.Ltrap_handler ldr r5, =.Ltrap_handler
ldr r4, [r6] ldr r4, Caml_state(exception_pointer)
str r4, [sp, 0] str r4, [sp, 0]
str r5, [sp, 4] str r5, [sp, 4]
mov trap_ptr, sp mov trap_ptr, sp
/* Reload allocation pointers */ /* Reload allocation pointer */
ldr r4, =caml_young_ptr ldr alloc_ptr, Caml_state(young_ptr)
ldr alloc_ptr, [r4]
ldr r4, =caml_young_limit
ldr alloc_limit, [r4]
/* Call the OCaml code */ /* Call the OCaml code */
blx r12 blx r12
.Lcaml_retaddr: .Lcaml_retaddr:
/* Pop the trap frame, restoring caml_exception_pointer */ /* Pop the trap frame, restoring caml_exception_pointer */
ldr r4, =caml_exception_pointer
ldr r5, [sp, 0] ldr r5, [sp, 0]
str r5, [r4] str r5, Caml_state(exception_pointer)
add sp, sp, 8; CFI_ADJUST(-8) add sp, sp, 8; CFI_ADJUST(-8)
/* Pop the callback link, restoring the global variables */ /* Pop the callback link, restoring the global variables */
.Lreturn_result: .Lreturn_result:
ldr r4, =caml_bottom_of_stack
ldr r5, [sp, 0] ldr r5, [sp, 0]
str r5, [r4] str r5, Caml_state(bottom_of_stack)
ldr r4, =caml_last_return_address
ldr r5, [sp, 4] ldr r5, [sp, 4]
str r5, [r4] str r5, Caml_state(last_return_address)
ldr r4, =caml_gc_regs
ldr r5, [sp, 8] ldr r5, [sp, 8]
str r5, [r4] str r5, Caml_state(gc_regs)
add sp, sp, 16; CFI_ADJUST(-16) add sp, sp, 16; CFI_ADJUST(-16)
/* Update allocation pointer */ /* Update allocation pointer */
ldr r4, =caml_young_ptr str alloc_ptr, Caml_state(young_ptr)
str alloc_ptr, [r4]
/* Reload callee-save registers and return address */ /* Reload callee-save registers and return address */
pop {r4-r8,r10,r11,lr}; CFI_ADJUST(-32) pop {r4-r8,r10,r11,lr}; CFI_ADJUST(-32)
#if defined(SYS_linux_eabihf) || defined(SYS_netbsd) #if defined(SYS_linux_eabihf) || defined(SYS_netbsd)
@ -370,8 +354,7 @@ FUNCTION(caml_start_program)
.Ltrap_handler: .Ltrap_handler:
CFI_STARTPROC CFI_STARTPROC
/* Save exception pointer */ /* Save exception pointer */
ldr r12, =caml_exception_pointer str trap_ptr, Caml_state(exception_pointer)
str trap_ptr, [r12]
/* Encode exception bucket as an exception result */ /* Encode exception bucket as an exception result */
orr r0, r0, 2 orr r0, r0, 2
/* Return it */ /* Return it */
@ -385,8 +368,7 @@ FUNCTION(caml_start_program)
FUNCTION(caml_raise_exn) FUNCTION(caml_raise_exn)
CFI_STARTPROC CFI_STARTPROC
/* Test if backtrace is active */ /* Test if backtrace is active */
ldr r1, =caml_backtrace_active ldr r1, Caml_state(backtrace_active)
ldr r1, [r1]
cbz r1, 1f cbz r1, 1f
/* Preserve exception bucket in callee-save register r4 */ /* Preserve exception bucket in callee-save register r4 */
mov r4, r0 mov r4, r0
@ -408,24 +390,21 @@ FUNCTION(caml_raise_exn)
FUNCTION(caml_raise_exception) FUNCTION(caml_raise_exception)
CFI_STARTPROC CFI_STARTPROC
/* Reload trap ptr, alloc ptr and alloc limit */ /* Load the domain state ptr */
ldr trap_ptr, =caml_exception_pointer mov domain_state_ptr, r0
ldr alloc_ptr, =caml_young_ptr /* Load exception bucket */
ldr alloc_limit, =caml_young_limit mov r0, r1
ldr trap_ptr, [trap_ptr] /* Reload trap ptr and alloc ptr */
ldr alloc_ptr, [alloc_ptr] ldr trap_ptr, Caml_state(exception_pointer)
ldr alloc_limit, [alloc_limit] ldr alloc_ptr, Caml_state(young_ptr)
/* Test if backtrace is active */ /* Test if backtrace is active */
ldr r1, =caml_backtrace_active ldr r1, Caml_state(backtrace_active)
ldr r1, [r1]
cbz r1, 1f cbz r1, 1f
/* Preserve exception bucket in callee-save register r4 */ /* Preserve exception bucket in callee-save register r4 */
mov r4, r0 mov r4, r0
ldr r1, =caml_last_return_address /* arg2: pc of raise */ ldr r1, Caml_state(last_return_address) /* arg2: pc of raise */
ldr r1, [r1] ldr r2, Caml_state(bottom_of_stack) /* arg3: sp of raise */
ldr r2, =caml_bottom_of_stack /* arg3: sp of raise */ mov r3, trap_ptr /* arg4: sp of handler */
ldr r2, [r2]
mov r3, trap_ptr /* arg4: sp of handler */
bl caml_stash_backtrace bl caml_stash_backtrace
/* Restore exception bucket */ /* Restore exception bucket */
mov r0, r4 mov r0, r4
@ -438,42 +417,43 @@ FUNCTION(caml_raise_exception)
/* Callback from C to OCaml */ /* Callback from C to OCaml */
FUNCTION(caml_callback_exn) FUNCTION(caml_callback_asm)
CFI_STARTPROC CFI_STARTPROC
/* Initial shuffling of arguments (r0 = closure, r1 = first arg) */ /* Initial shuffling of arguments */
mov r12, r0 /* (r0 = Caml_state, r1 = closure, [r2] = first arg) */
mov r0, r1 /* r0 = first arg */ ldr r0, [r2] /* r0 = first arg */
mov r1, r12 /* r1 = closure environment */ /* r1 = closure environment */
ldr r12, [r12] /* code pointer */ ldr r12, [r1] /* code pointer */
b .Ljump_to_caml b .Ljump_to_caml
CFI_ENDPROC CFI_ENDPROC
.size caml_callback_exn, .-caml_callback_exn .size caml_callback_asm, .-caml_callback_asm
FUNCTION(caml_callback2_exn) FUNCTION(caml_callback2_asm)
CFI_STARTPROC CFI_STARTPROC
/* Initial shuffling of arguments (r0 = closure, r1 = arg1, r2 = arg2) */ /* Initial shuffling of arguments */
mov r12, r0 /* (r0 = Caml_state, r1 = closure, [r2] = arg1, [r2,4] = arg2) */
mov r0, r1 /* r0 = first arg */ mov r12, r1
mov r1, r2 /* r1 = second arg */ ldr r0, [r2] /* r0 = first arg */
mov r2, r12 /* r2 = closure environment */ ldr r1, [r2,4] /* r1 = second arg */
mov r2, r12 /* r2 = closure environment */
ldr r12, =caml_apply2 ldr r12, =caml_apply2
b .Ljump_to_caml b .Ljump_to_caml
CFI_ENDPROC CFI_ENDPROC
.size caml_callback2_exn, .-caml_callback2_exn .size caml_callback2_asm, .-caml_callback2_asm
FUNCTION(caml_callback3_exn) FUNCTION(caml_callback3_asm)
CFI_STARTPROC CFI_STARTPROC
/* Initial shuffling of arguments */ /* Initial shuffling of arguments */
/* (r0 = closure, r1 = arg1, r2 = arg2, r3 = arg3) */ /* (r0 = Caml_state, r1 = closure, [r2] = arg1, [r2,4] = arg2,
mov r12, r0 [r2,8] = arg3) */
mov r0, r1 /* r0 = first arg */ mov r3, r1 /* r3 = closure environment */
mov r1, r2 /* r1 = second arg */ ldr r0, [r2] /* r0 = first arg */
mov r2, r3 /* r2 = third arg */ ldr r1, [r2,4] /* r1 = second arg */
mov r3, r12 /* r3 = closure environment */ ldr r2, [r2,8] /* r2 = third arg */
ldr r12, =caml_apply3 ldr r12, =caml_apply3
b .Ljump_to_caml b .Ljump_to_caml
CFI_ENDPROC CFI_ENDPROC
.size caml_callback3_exn, .-caml_callback3_exn .size caml_callback3_asm, .-caml_callback3_asm
FUNCTION(caml_ml_array_bound_error) FUNCTION(caml_ml_array_bound_error)
CFI_STARTPROC CFI_STARTPROC

View File
/* Special registers */ /* Special registers */
#define DOMAIN_STATE_PTR x25
#define TRAP_PTR x26 #define TRAP_PTR x26
#define ALLOC_PTR x27 #define ALLOC_PTR x27
#define ALLOC_LIMIT x28 #define ALLOC_LIMIT x28
#define ARG x15 #define ARG x15
#define TMP x16 #define TMP x16
#define TMP2 x17 #define TMP2 x17
#define ARG_DOMAIN_STATE_PTR x18
#define C_ARG_1 x0
#define C_ARG_2 x1
#define C_ARG_3 x2
#define C_ARG_4 x3
/* Support for CFI directives */ /* Support for CFI directives */
@ -43,44 +50,26 @@
#define CFI_OFFSET(r,n) #define CFI_OFFSET(r,n)
#endif #endif
/* Macros to load and store global variables. Destroy TMP2 */ .set domain_curr_field, 0
#define DOMAIN_STATE(c_type, name) \
.equ domain_field_caml_##name, domain_curr_field ; \
.set domain_curr_field, domain_curr_field + 1
#include "../runtime/caml/domain_state.tbl"
#undef DOMAIN_STATE
#define Caml_state(var) [x25, 8*domain_field_caml_##var]
#if defined(__PIC__) #if defined(__PIC__)
#define ADDRGLOBAL(reg,symb) \ #define ADDRGLOBAL(reg,symb) \
adrp TMP2, :got:symb; \ adrp TMP2, :got:symb; \
ldr reg, [TMP2, #:got_lo12:symb] ldr reg, [TMP2, #:got_lo12:symb]
#define LOADGLOBAL(reg,symb) \
ADDRGLOBAL(TMP2,symb); \
ldr reg, [TMP2]
#define STOREGLOBAL(reg,symb) \
ADDRGLOBAL(TMP2,symb); \
str reg, [TMP2]
#define LOADGLOBAL32(reg,symb) \
ADDRGLOBAL(TMP2,symb); \
ldrsw reg, [TMP2]
#else #else
#define ADDRGLOBAL(reg,symb) \ #define ADDRGLOBAL(reg,symb) \
adrp reg, symb; \ adrp reg, symb; \
add reg, reg, #:lo12:symb add reg, reg, #:lo12:symb
#define LOADGLOBAL(reg,symb) \
adrp TMP2, symb; \
ldr reg, [TMP2, #:lo12:symb]
#define STOREGLOBAL(reg,symb) \
adrp TMP2, symb; \
str reg, [TMP2, #:lo12:symb]
#define LOADGLOBAL32(reg,symb) \
adrp TMP2, symb; \
ldrsw reg, [TMP2, #:lo12:symb]
#endif #endif
#if defined(FUNCTION_SECTIONS) #if defined(FUNCTION_SECTIONS)
@ -113,10 +102,10 @@ caml_system__code_begin:
FUNCTION(caml_call_gc) FUNCTION(caml_call_gc)
CFI_STARTPROC CFI_STARTPROC
/* Record return address */ /* Record return address */
STOREGLOBAL(x30, caml_last_return_address) str x30, Caml_state(last_return_address)
/* Record lowest stack address */ /* Record lowest stack address */
mov TMP, sp mov TMP, sp
STOREGLOBAL(TMP, caml_bottom_of_stack) str TMP, Caml_state(bottom_of_stack)
.Lcaml_call_gc: .Lcaml_call_gc:
/* Set up stack space, saving return address and frame pointer */ /* Set up stack space, saving return address and frame pointer */
/* (2 regs RA/GP, 24 allocatable int regs, 24 caller-save float regs) * 8 */ /* (2 regs RA/GP, 24 allocatable int regs, 24 caller-save float regs) * 8 */
@ -155,11 +144,11 @@ FUNCTION(caml_call_gc)
stp d30, d31, [sp, 384] stp d30, d31, [sp, 384]
/* Store pointer to saved integer registers in caml_gc_regs */ /* Store pointer to saved integer registers in caml_gc_regs */
add TMP, sp, #16 add TMP, sp, #16
STOREGLOBAL(TMP, caml_gc_regs) str TMP, Caml_state(gc_regs)
/* Save current allocation pointer for debugging purposes */ /* Save current allocation pointer for debugging purposes */
STOREGLOBAL(ALLOC_PTR, caml_young_ptr) str ALLOC_PTR, Caml_state(young_ptr)
/* Save trap pointer in case an exception is raised during GC */ /* Save trap pointer in case an exception is raised during GC */
STOREGLOBAL(TRAP_PTR, caml_exception_pointer) str TRAP_PTR, Caml_state(exception_pointer)
/* Call the garbage collector */ /* Call the garbage collector */
bl caml_garbage_collection bl caml_garbage_collection
/* Restore registers */ /* Restore registers */
@ -188,8 +177,8 @@ FUNCTION(caml_call_gc)
ldp d28, d29, [sp, 368] ldp d28, d29, [sp, 368]
ldp d30, d31, [sp, 384] ldp d30, d31, [sp, 384]
/* Reload new allocation pointer and allocation limit */ /* Reload new allocation pointer and allocation limit */
LOADGLOBAL(ALLOC_PTR, caml_young_ptr) ldr ALLOC_PTR, Caml_state(young_ptr)
LOADGLOBAL(ALLOC_LIMIT, caml_young_limit) ldr ALLOC_LIMIT, Caml_state(young_limit)
/* Free stack space and return to caller */ /* Free stack space and return to caller */
ldp x29, x30, [sp], 400 ldp x29, x30, [sp], 400
ret ret
@ -212,10 +201,10 @@ FUNCTION(caml_alloc1)
frame won't match the frame size contained in the relevant frame frame won't match the frame size contained in the relevant frame
descriptor. */ descriptor. */
add x29, sp, #16 add x29, sp, #16
STOREGLOBAL(x29, caml_bottom_of_stack) str x29, Caml_state(bottom_of_stack)
add x29, sp, #0 add x29, sp, #0
/* Record return address */ /* Record return address */
STOREGLOBAL(x30, caml_last_return_address) str x30, Caml_state(last_return_address)
/* Call GC */ /* Call GC */
bl .Lcaml_call_gc bl .Lcaml_call_gc
/* Restore return address */ /* Restore return address */
@ -241,10 +230,10 @@ caml_alloc2:
/* Record the lowest address of the caller's stack frame. /* Record the lowest address of the caller's stack frame.
See comment above. */ See comment above. */
add x29, sp, #16 add x29, sp, #16
STOREGLOBAL(x29, caml_bottom_of_stack) str x29, Caml_state(bottom_of_stack)
add x29, sp, #0 add x29, sp, #0
/* Record return address */ /* Record return address */
STOREGLOBAL(x30, caml_last_return_address) str x30, Caml_state(last_return_address)
/* Call GC */ /* Call GC */
bl .Lcaml_call_gc bl .Lcaml_call_gc
/* Restore return address */ /* Restore return address */
@ -268,10 +257,10 @@ FUNCTION(caml_alloc3)
/* Record the lowest address of the caller's stack frame. /* Record the lowest address of the caller's stack frame.
See comment above. */ See comment above. */
add x29, sp, #16 add x29, sp, #16
STOREGLOBAL(x29, caml_bottom_of_stack) str x29, Caml_state(bottom_of_stack)
add x29, sp, #0 add x29, sp, #0
/* Record return address */ /* Record return address */
STOREGLOBAL(x30, caml_last_return_address) str x30, Caml_state(last_return_address)
/* Call GC */ /* Call GC */
bl .Lcaml_call_gc bl .Lcaml_call_gc
/* Restore return address */ /* Restore return address */
@ -298,10 +287,10 @@ caml_allocN:
/* Record the lowest address of the caller's stack frame. /* Record the lowest address of the caller's stack frame.
See comment above. */ See comment above. */
add x29, sp, #16 add x29, sp, #16
STOREGLOBAL(x29, caml_bottom_of_stack) str x29, Caml_state(bottom_of_stack)
add x29, sp, #0 add x29, sp, #0
/* Record return address */ /* Record return address */
STOREGLOBAL(x30, caml_last_return_address) str x30, Caml_state(last_return_address)
/* Call GC. This preserves ARG */ /* Call GC. This preserves ARG */
bl .Lcaml_call_gc bl .Lcaml_call_gc
/* Restore return address */ /* Restore return address */
@ -321,17 +310,17 @@ FUNCTION(caml_c_call)
mov x19, x30 mov x19, x30
CFI_REGISTER(30, 19) CFI_REGISTER(30, 19)
/* Record lowest stack address and return address */ /* Record lowest stack address and return address */
STOREGLOBAL(x30, caml_last_return_address) str x30, Caml_state(last_return_address)
add TMP, sp, #0 add TMP, sp, #0
STOREGLOBAL(TMP, caml_bottom_of_stack) str TMP, Caml_state(bottom_of_stack)
/* Make the exception handler and alloc ptr available to the C code */ /* Make the exception handler and alloc ptr available to the C code */
STOREGLOBAL(ALLOC_PTR, caml_young_ptr) str ALLOC_PTR, Caml_state(young_ptr)
STOREGLOBAL(TRAP_PTR, caml_exception_pointer) str TRAP_PTR, Caml_state(exception_pointer)
/* Call the function */ /* Call the function */
blr ARG blr ARG
/* Reload alloc ptr and alloc limit */ /* Reload alloc ptr and alloc limit */
LOADGLOBAL(ALLOC_PTR, caml_young_ptr) ldr ALLOC_PTR, Caml_state(young_ptr)
LOADGLOBAL(ALLOC_LIMIT, caml_young_limit) ldr ALLOC_LIMIT, Caml_state(young_limit)
/* Return */ /* Return */
ret x19 ret x19
CFI_ENDPROC CFI_ENDPROC
@ -341,6 +330,7 @@ FUNCTION(caml_c_call)
FUNCTION(caml_start_program) FUNCTION(caml_start_program)
CFI_STARTPROC CFI_STARTPROC
mov ARG_DOMAIN_STATE_PTR, C_ARG_1
ADDRGLOBAL(ARG, caml_program) ADDRGLOBAL(ARG, caml_program)
/* Code shared with caml_callback* */ /* Code shared with caml_callback* */
@ -363,39 +353,41 @@ FUNCTION(caml_start_program)
stp d10, d11, [sp, 112] stp d10, d11, [sp, 112]
stp d12, d13, [sp, 128] stp d12, d13, [sp, 128]
stp d14, d15, [sp, 144] stp d14, d15, [sp, 144]
/* Load domain state pointer from argument */
mov DOMAIN_STATE_PTR, ARG_DOMAIN_STATE_PTR
/* Setup a callback link on the stack */ /* Setup a callback link on the stack */
LOADGLOBAL(x8, caml_bottom_of_stack) ldr x8, Caml_state(bottom_of_stack)
LOADGLOBAL(x9, caml_last_return_address) ldr x9, Caml_state(last_return_address)
LOADGLOBAL(x10, caml_gc_regs) ldr x10, Caml_state(gc_regs)
stp x8, x9, [sp, -32]! /* 16-byte alignment */ stp x8, x9, [sp, -32]! /* 16-byte alignment */
CFI_ADJUST(32) CFI_ADJUST(32)
str x10, [sp, 16] str x10, [sp, 16]
/* Setup a trap frame to catch exceptions escaping the OCaml code */ /* Setup a trap frame to catch exceptions escaping the OCaml code */
LOADGLOBAL(x8, caml_exception_pointer) ldr x8, Caml_state(exception_pointer)
adr x9, .Ltrap_handler adr x9, .Ltrap_handler
stp x8, x9, [sp, -16]! stp x8, x9, [sp, -16]!
CFI_ADJUST(16) CFI_ADJUST(16)
add TRAP_PTR, sp, #0 add TRAP_PTR, sp, #0
/* Reload allocation pointers */ /* Reload allocation pointers */
LOADGLOBAL(ALLOC_PTR, caml_young_ptr) ldr ALLOC_PTR, Caml_state(young_ptr)
LOADGLOBAL(ALLOC_LIMIT, caml_young_limit) ldr ALLOC_LIMIT, Caml_state(young_limit)
/* Call the OCaml code */ /* Call the OCaml code */
blr ARG blr ARG
.Lcaml_retaddr: .Lcaml_retaddr:
/* Pop the trap frame, restoring caml_exception_pointer */ /* Pop the trap frame, restoring caml_exception_pointer */
ldr x8, [sp], 16 ldr x8, [sp], 16
CFI_ADJUST(-16) CFI_ADJUST(-16)
STOREGLOBAL(x8, caml_exception_pointer) str x8, Caml_state(exception_pointer)
/* Pop the callback link, restoring the global variables */ /* Pop the callback link, restoring the global variables */
.Lreturn_result: .Lreturn_result:
ldr x10, [sp, 16] ldr x10, [sp, 16]
ldp x8, x9, [sp], 32 ldp x8, x9, [sp], 32
CFI_ADJUST(-32) CFI_ADJUST(-32)
STOREGLOBAL(x8, caml_bottom_of_stack) str x8, Caml_state(bottom_of_stack)
STOREGLOBAL(x9, caml_last_return_address) str x9, Caml_state(last_return_address)
STOREGLOBAL(x10, caml_gc_regs) str x10, Caml_state(gc_regs)
/* Update allocation pointer */ /* Update allocation pointer */
STOREGLOBAL(ALLOC_PTR, caml_young_ptr) str ALLOC_PTR, Caml_state(young_ptr)
/* Reload callee-save registers and return address */ /* Reload callee-save registers and return address */
ldp x19, x20, [sp, 16] ldp x19, x20, [sp, 16]
ldp x21, x22, [sp, 32] ldp x21, x22, [sp, 32]
@ -421,7 +413,7 @@ FUNCTION(caml_start_program)
.Ltrap_handler: .Ltrap_handler:
CFI_STARTPROC CFI_STARTPROC
/* Save exception pointer */ /* Save exception pointer */
STOREGLOBAL(TRAP_PTR, caml_exception_pointer) str TRAP_PTR, Caml_state(exception_pointer)
/* Encode exception bucket as an exception result */ /* Encode exception bucket as an exception result */
orr x0, x0, #2 orr x0, x0, #2
/* Return it */ /* Return it */
@ -435,8 +427,8 @@ FUNCTION(caml_start_program)
FUNCTION(caml_raise_exn) FUNCTION(caml_raise_exn)
CFI_STARTPROC CFI_STARTPROC
/* Test if backtrace is active */ /* Test if backtrace is active */
LOADGLOBAL32(TMP, caml_backtrace_active) ldr TMP, Caml_state(backtrace_active)
cbnz TMP, 2f cbnz TMP, 2f
1: /* Cut stack at current trap handler */ 1: /* Cut stack at current trap handler */
mov sp, TRAP_PTR mov sp, TRAP_PTR
/* Pop previous handler and jump to it */ /* Pop previous handler and jump to it */
@ -461,12 +453,16 @@ FUNCTION(caml_raise_exn)
FUNCTION(caml_raise_exception) FUNCTION(caml_raise_exception)
CFI_STARTPROC CFI_STARTPROC
/* Load the domain state ptr */
mov DOMAIN_STATE_PTR, C_ARG_1
/* Load the exception bucket */
mov x0, C_ARG_2
/* Reload trap ptr, alloc ptr and alloc limit */ /* Reload trap ptr, alloc ptr and alloc limit */
LOADGLOBAL(TRAP_PTR, caml_exception_pointer) ldr TRAP_PTR, Caml_state(exception_pointer)
LOADGLOBAL(ALLOC_PTR, caml_young_ptr) ldr ALLOC_PTR, Caml_state(young_ptr)
LOADGLOBAL(ALLOC_LIMIT, caml_young_limit) ldr ALLOC_LIMIT, Caml_state(young_limit)
/* Test if backtrace is active */ /* Test if backtrace is active */
LOADGLOBAL32(TMP, caml_backtrace_active) ldr TMP, Caml_state(backtrace_active)
cbnz TMP, 2f cbnz TMP, 2f
1: /* Cut stack at current trap handler */ 1: /* Cut stack at current trap handler */
mov sp, TRAP_PTR mov sp, TRAP_PTR
@ -477,9 +473,9 @@ FUNCTION(caml_raise_exception)
2: /* Preserve exception bucket in callee-save register x19 */ 2: /* Preserve exception bucket in callee-save register x19 */
mov x19, x0 mov x19, x0
/* Stash the backtrace */ /* Stash the backtrace */
/* arg1: exn bucket, already in x0 */ /* arg1: exn bucket */
LOADGLOBAL(x1, caml_last_return_address) /* arg2: pc of raise */ ldr x1, Caml_state(last_return_address) /* arg2: pc of raise */
LOADGLOBAL(x2, caml_bottom_of_stack) /* arg3: sp of raise */ ldr x2, Caml_state(bottom_of_stack) /* arg3: sp of raise */
mov x3, TRAP_PTR /* arg4: sp of handler */ mov x3, TRAP_PTR /* arg4: sp of handler */
bl caml_stash_backtrace bl caml_stash_backtrace
/* Restore exception bucket and raise */ /* Restore exception bucket and raise */
@ -490,50 +486,52 @@ FUNCTION(caml_raise_exception)
/* Callback from C to OCaml */ /* Callback from C to OCaml */
FUNCTION(caml_callback_exn) FUNCTION(caml_callback_asm)
CFI_STARTPROC CFI_STARTPROC
/* Initial shuffling of arguments (x0 = closure, x1 = first arg) */ /* Initial shuffling of arguments */
mov TMP, x0 /* (x0 = Caml_state, x1 = closure, [x2] = first arg) */
mov x0, x1 /* x0 = first arg */ mov ARG_DOMAIN_STATE_PTR, x0
mov x1, TMP /* x1 = closure environment */ ldr x0, [x2] /* x0 = first arg */
ldr ARG, [TMP] /* code pointer */ /* x1 = closure environment */
ldr ARG, [x1] /* code pointer */
b .Ljump_to_caml b .Ljump_to_caml
CFI_ENDPROC CFI_ENDPROC
.type caml_callback_exn, %function .type caml_callback_asm, %function
.size caml_callback_exn, .-caml_callback_exn .size caml_callback_asm, .-caml_callback_asm
TEXT_SECTION(caml_callback2_exn) TEXT_SECTION(caml_callback2_asm)
.align 2 .align 2
.globl caml_callback2_exn .globl caml_callback2_asm
caml_callback2_exn: caml_callback2_asm:
CFI_STARTPROC CFI_STARTPROC
/* Initial shuffling of arguments (x0 = closure, x1 = arg1, x2 = arg2) */ /* Initial shuffling of arguments */
mov TMP, x0 /* (x0 = Caml_state, x1 = closure, [x2] = arg1, [x2,8] = arg2) */
mov x0, x1 /* x0 = first arg */ mov ARG_DOMAIN_STATE_PTR, x0
mov x1, x2 /* x1 = second arg */ mov TMP, x1
ldp x0, x1, [x2, 0] /* x0 = first arg, x1 = second arg */
mov x2, TMP /* x2 = closure environment */ mov x2, TMP /* x2 = closure environment */
ADDRGLOBAL(ARG, caml_apply2) ADDRGLOBAL(ARG, caml_apply2)
b .Ljump_to_caml b .Ljump_to_caml
CFI_ENDPROC CFI_ENDPROC
.type caml_callback2_exn, %function .type caml_callback2_asm, %function
.size caml_callback2_exn, .-caml_callback2_exn .size caml_callback2_asm, .-caml_callback2_asm
TEXT_SECTION(caml_callback3_exn) TEXT_SECTION(caml_callback3_asm)
.align 2 .align 2
.globl caml_callback3_exn .globl caml_callback3_asm
caml_callback3_exn: caml_callback3_asm:
CFI_STARTPROC CFI_STARTPROC
/* Initial shuffling of arguments */ /* Initial shuffling of arguments */
/* (x0 = closure, x1 = arg1, x2 = arg2, x3 = arg3) */ /* (x0 = Caml_state, x1 = closure, [x2] = arg1, [x2,8] = arg2,
mov TMP, x0 [x2,16] = arg3) */
mov x0, x1 /* x0 = first arg */ mov ARG_DOMAIN_STATE_PTR, x0
mov x1, x2 /* x1 = second arg */ mov x3, x1 /* x3 = closure environment */
mov x2, x3 /* x2 = third arg */ ldp x0, x1, [x2, 0] /* x0 = first arg, x1 = second arg */
mov x3, TMP /* x3 = closure environment */ ldr x2, [x2, 16] /* x2 = third arg */
ADDRGLOBAL(ARG, caml_apply3) ADDRGLOBAL(ARG, caml_apply3)
b .Ljump_to_caml b .Ljump_to_caml
CFI_ENDPROC CFI_ENDPROC
.size caml_callback3_exn, .-caml_callback3_exn .size caml_callback3_asm, .-caml_callback3_asm
FUNCTION(caml_ml_array_bound_error) FUNCTION(caml_ml_array_bound_error)
CFI_STARTPROC CFI_STARTPROC
View File
@ -625,7 +625,8 @@ CAMLprim value caml_array_fill(value array,
if (Is_young(old)) continue; if (Is_young(old)) continue;
if (caml_gc_phase == Phase_mark) caml_darken(old, NULL); if (caml_gc_phase == Phase_mark) caml_darken(old, NULL);
} }
if (is_val_young_block) add_to_ref_table (&caml_ref_table, fp); if (is_val_young_block)
add_to_ref_table (Caml_state->ref_table, fp);
} }
if (is_val_young_block) caml_check_urgent_gc (Val_unit); if (is_val_young_block) caml_check_urgent_gc (Val_unit);
} }
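This hunk shows the pattern applied across the C runtime in this change: a former global (here the minor-heap ref table) is reached through Caml_state, and call sites that used to take the address of the global now pass the stored pointer. A hedged sketch of the rewritten call shape (the ref_table field type and the surrounding declarations are assumptions):

    /* Hypothetical helper; add_to_ref_table is the runtime function used above. */
    static void sketch_remember_pointer(value *fp)
    {
      /* before this patch:  add_to_ref_table(&caml_ref_table, fp);  */
      add_to_ref_table(Caml_state->ref_table, fp);
    }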
View File
@ -31,14 +31,9 @@
/* The table of debug information fragments */ /* The table of debug information fragments */
struct ext_table caml_debug_info; struct ext_table caml_debug_info;
CAMLexport int32_t caml_backtrace_active = 0;
CAMLexport int32_t caml_backtrace_pos = 0;
CAMLexport backtrace_slot * caml_backtrace_buffer = NULL;
CAMLexport value caml_backtrace_last_exn = Val_unit;
void caml_init_backtrace(void) void caml_init_backtrace(void)
{ {
caml_register_global_root(&caml_backtrace_last_exn); caml_register_global_root(&Caml_state->backtrace_last_exn);
} }
/* Start or stop the backtrace machinery */ /* Start or stop the backtrace machinery */
@ -46,14 +41,14 @@ CAMLprim value caml_record_backtrace(value vflag)
{ {
int flag = Int_val(vflag); int flag = Int_val(vflag);
if (flag != caml_backtrace_active) { if (flag != Caml_state->backtrace_active) {
caml_backtrace_active = flag; Caml_state->backtrace_active = flag;
caml_backtrace_pos = 0; Caml_state->backtrace_pos = 0;
caml_backtrace_last_exn = Val_unit; Caml_state->backtrace_last_exn = Val_unit;
/* Note: We do lazy initialization of caml_backtrace_buffer when /* Note: We do lazy initialization of Caml_state->backtrace_buffer when
needed in order to simplify the interface with the thread needed in order to simplify the interface with the thread
library (thread creation doesn't need to allocate library (thread creation doesn't need to allocate
caml_backtrace_buffer). So we don't have to allocate it here. Caml_state->backtrace_buffer). So we don't have to allocate it here.
*/ */
} }
return Val_unit; return Val_unit;
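The note above describes lazy initialization: the buffer is allocated only when the first slot has to be recorded, which keeps thread creation cheap. A sketch of a recording step under that scheme (slot is a placeholder; error handling simplified):

    /* Hypothetical recording step; caml_alloc_backtrace_buffer() is the
       runtime function shown later in this diff. */
    static void sketch_record_slot(backtrace_slot slot)
    {
      if (Caml_state->backtrace_buffer == NULL
          && caml_alloc_backtrace_buffer() == -1)
        return;                                   /* allocation failed: drop it */
      if (Caml_state->backtrace_pos < BACKTRACE_BUFFER_SIZE)
        Caml_state->backtrace_buffer[Caml_state->backtrace_pos++] = slot;
    }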
@ -62,7 +57,7 @@ CAMLprim value caml_record_backtrace(value vflag)
/* Return the status of the backtrace machinery */ /* Return the status of the backtrace machinery */
CAMLprim value caml_backtrace_status(value vunit) CAMLprim value caml_backtrace_status(value vunit)
{ {
return Val_bool(caml_backtrace_active); return Val_bool(Caml_state->backtrace_active);
} }
/* Print location information -- same behavior as in Printexc /* Print location information -- same behavior as in Printexc
@ -120,8 +115,8 @@ CAMLexport void caml_print_exception_backtrace(void)
return; return;
} }
for (i = 0; i < caml_backtrace_pos; i++) { for (i = 0; i < Caml_state->backtrace_pos; i++) {
for (dbg = caml_debuginfo_extract(caml_backtrace_buffer[i]); for (dbg = caml_debuginfo_extract(Caml_state->backtrace_buffer[i]);
dbg != NULL; dbg != NULL;
dbg = caml_debuginfo_next(dbg)) dbg = caml_debuginfo_next(dbg))
{ {
@ -142,28 +137,28 @@ CAMLprim value caml_get_exception_raw_backtrace(value unit)
if the finalizer raises then catches an exception). We choose to ignore if the finalizer raises then catches an exception). We choose to ignore
any such finalizer backtraces and return the original one. */ any such finalizer backtraces and return the original one. */
if (!caml_backtrace_active || if (!Caml_state->backtrace_active ||
caml_backtrace_buffer == NULL || Caml_state->backtrace_buffer == NULL ||
caml_backtrace_pos == 0) { Caml_state->backtrace_pos == 0) {
res = caml_alloc(0, 0); res = caml_alloc(0, 0);
} }
else { else {
backtrace_slot saved_caml_backtrace_buffer[BACKTRACE_BUFFER_SIZE]; backtrace_slot saved_backtrace_buffer[BACKTRACE_BUFFER_SIZE];
int saved_caml_backtrace_pos; int saved_backtrace_pos;
intnat i; intnat i;
saved_caml_backtrace_pos = caml_backtrace_pos; saved_backtrace_pos = Caml_state->backtrace_pos;
if (saved_caml_backtrace_pos > BACKTRACE_BUFFER_SIZE) { if (saved_backtrace_pos > BACKTRACE_BUFFER_SIZE) {
saved_caml_backtrace_pos = BACKTRACE_BUFFER_SIZE; saved_backtrace_pos = BACKTRACE_BUFFER_SIZE;
} }
memcpy(saved_caml_backtrace_buffer, caml_backtrace_buffer, memcpy(saved_backtrace_buffer, Caml_state->backtrace_buffer,
saved_caml_backtrace_pos * sizeof(backtrace_slot)); saved_backtrace_pos * sizeof(backtrace_slot));
res = caml_alloc(saved_caml_backtrace_pos, 0); res = caml_alloc(saved_backtrace_pos, 0);
for (i = 0; i < saved_caml_backtrace_pos; i++) { for (i = 0; i < saved_backtrace_pos; i++) {
Field(res, i) = Val_backtrace_slot(saved_caml_backtrace_buffer[i]); Field(res, i) = Val_backtrace_slot(saved_backtrace_buffer[i]);
} }
} }
@ -178,7 +173,7 @@ CAMLprim value caml_restore_raw_backtrace(value exn, value backtrace)
intnat i; intnat i;
mlsize_t bt_size; mlsize_t bt_size;
caml_backtrace_last_exn = exn; Caml_state->backtrace_last_exn = exn;
bt_size = Wosize_val(backtrace); bt_size = Wosize_val(backtrace);
if(bt_size > BACKTRACE_BUFFER_SIZE){ if(bt_size > BACKTRACE_BUFFER_SIZE){
@ -188,18 +183,19 @@ CAMLprim value caml_restore_raw_backtrace(value exn, value backtrace)
/* We don't allocate if the backtrace is empty (no -g or backtrace /* We don't allocate if the backtrace is empty (no -g or backtrace
not activated) */ not activated) */
if(bt_size == 0){ if(bt_size == 0){
caml_backtrace_pos = 0; Caml_state->backtrace_pos = 0;
return Val_unit; return Val_unit;
} }
/* Allocate if needed and copy the backtrace buffer */ /* Allocate if needed and copy the backtrace buffer */
if (caml_backtrace_buffer == NULL && caml_alloc_backtrace_buffer() == -1){ if (Caml_state->backtrace_buffer == NULL &&
caml_alloc_backtrace_buffer() == -1) {
return Val_unit; return Val_unit;
} }
caml_backtrace_pos = bt_size; Caml_state->backtrace_pos = bt_size;
for(i=0; i < caml_backtrace_pos; i++){ for(i=0; i < Caml_state->backtrace_pos; i++){
caml_backtrace_buffer[i] = Backtrace_slot_val(Field(backtrace, i)); Caml_state->backtrace_buffer[i] = Backtrace_slot_val(Field(backtrace, i));
} }
return Val_unit; return Val_unit;

View File

@ -223,10 +223,10 @@ CAMLprim value caml_remove_debug_info(code_t start)
} }
int caml_alloc_backtrace_buffer(void){ int caml_alloc_backtrace_buffer(void){
CAMLassert(caml_backtrace_pos == 0); CAMLassert(Caml_state->backtrace_pos == 0);
caml_backtrace_buffer = Caml_state->backtrace_buffer =
caml_stat_alloc_noexc(BACKTRACE_BUFFER_SIZE * sizeof(code_t)); caml_stat_alloc_noexc(BACKTRACE_BUFFER_SIZE * sizeof(code_t));
if (caml_backtrace_buffer == NULL) return -1; if (Caml_state->backtrace_buffer == NULL) return -1;
return 0; return 0;
} }
@ -236,26 +236,27 @@ int caml_alloc_backtrace_buffer(void){
void caml_stash_backtrace(value exn, code_t pc, value * sp, int reraise) void caml_stash_backtrace(value exn, code_t pc, value * sp, int reraise)
{ {
if (pc != NULL) pc = pc - 1; if (pc != NULL) pc = pc - 1;
if (exn != caml_backtrace_last_exn || !reraise) { if (exn != Caml_state->backtrace_last_exn || !reraise) {
caml_backtrace_pos = 0; Caml_state->backtrace_pos = 0;
caml_backtrace_last_exn = exn; Caml_state->backtrace_last_exn = exn;
} }
if (caml_backtrace_buffer == NULL && caml_alloc_backtrace_buffer() == -1) if (Caml_state->backtrace_buffer == NULL &&
caml_alloc_backtrace_buffer() == -1)
return; return;
if (caml_backtrace_pos >= BACKTRACE_BUFFER_SIZE) return; if (Caml_state->backtrace_pos >= BACKTRACE_BUFFER_SIZE) return;
/* testing the code region is needed: PR#8026 */ /* testing the code region is needed: PR#8026 */
if (find_debug_info(pc) != NULL) if (find_debug_info(pc) != NULL)
caml_backtrace_buffer[caml_backtrace_pos++] = pc; Caml_state->backtrace_buffer[Caml_state->backtrace_pos++] = pc;
/* Traverse the stack and put all values pointing into bytecode /* Traverse the stack and put all values pointing into bytecode
into the backtrace buffer. */ into the backtrace buffer. */
for (/*nothing*/; sp < caml_trapsp; sp++) { for (/*nothing*/; sp < Caml_state->trapsp; sp++) {
code_t p = (code_t) *sp; code_t p = (code_t) *sp;
if (caml_backtrace_pos >= BACKTRACE_BUFFER_SIZE) break; if (Caml_state->backtrace_pos >= BACKTRACE_BUFFER_SIZE) break;
if (find_debug_info(p) != NULL) if (find_debug_info(p) != NULL)
caml_backtrace_buffer[caml_backtrace_pos++] = p; Caml_state->backtrace_buffer[Caml_state->backtrace_pos++] = p;
} }
} }
@ -265,7 +266,7 @@ void caml_stash_backtrace(value exn, code_t pc, value * sp, int reraise)
code_t caml_next_frame_pointer(value ** sp, value ** trsp) code_t caml_next_frame_pointer(value ** sp, value ** trsp)
{ {
while (*sp < caml_stack_high) { while (*sp < Caml_state->stack_high) {
code_t *p = (code_t*) (*sp)++; code_t *p = (code_t*) (*sp)++;
if(&Trap_pc(*trsp) == p) { if(&Trap_pc(*trsp) == p) {
*trsp = Trap_link(*trsp); *trsp = Trap_link(*trsp);
@ -281,8 +282,8 @@ code_t caml_next_frame_pointer(value ** sp, value ** trsp)
intnat caml_current_callstack_size(intnat max_frames) intnat caml_current_callstack_size(intnat max_frames)
{ {
intnat trace_size; intnat trace_size;
value * sp = caml_extern_sp; value * sp = Caml_state->extern_sp;
value * trsp = caml_trapsp; value * trsp = Caml_state->trapsp;
for (trace_size = 0; trace_size < max_frames; trace_size++) { for (trace_size = 0; trace_size < max_frames; trace_size++) {
code_t p = caml_next_frame_pointer(&sp, &trsp); code_t p = caml_next_frame_pointer(&sp, &trsp);
@ -293,8 +294,8 @@ intnat caml_current_callstack_size(intnat max_frames)
} }
void caml_current_callstack_write(value trace) { void caml_current_callstack_write(value trace) {
value * sp = caml_extern_sp; value * sp = Caml_state->extern_sp;
value * trsp = caml_trapsp; value * trsp = Caml_state->trapsp;
uintnat trace_pos, trace_size = Wosize_val(trace); uintnat trace_pos, trace_size = Wosize_val(trace);
for (trace_pos = 0; trace_pos < trace_size; trace_pos++) { for (trace_pos = 0; trace_pos < trace_size; trace_pos++) {

View File

@ -66,10 +66,10 @@ frame_descr * caml_next_frame_descriptor(uintnat * pc, char ** sp)
} }
int caml_alloc_backtrace_buffer(void){ int caml_alloc_backtrace_buffer(void){
CAMLassert(caml_backtrace_pos == 0); CAMLassert(Caml_state->backtrace_pos == 0);
caml_backtrace_buffer = Caml_state->backtrace_buffer =
caml_stat_alloc_noexc(BACKTRACE_BUFFER_SIZE * sizeof(backtrace_slot)); caml_stat_alloc_noexc(BACKTRACE_BUFFER_SIZE * sizeof(backtrace_slot));
if (caml_backtrace_buffer == NULL) return -1; if (Caml_state->backtrace_buffer == NULL) return -1;
return 0; return 0;
} }
@ -81,12 +81,13 @@ int caml_alloc_backtrace_buffer(void){
[caml_get_current_callstack] was implemented. */ [caml_get_current_callstack] was implemented. */
void caml_stash_backtrace(value exn, uintnat pc, char * sp, char * trapsp) void caml_stash_backtrace(value exn, uintnat pc, char * sp, char * trapsp)
{ {
if (exn != caml_backtrace_last_exn) { if (exn != Caml_state->backtrace_last_exn) {
caml_backtrace_pos = 0; Caml_state->backtrace_pos = 0;
caml_backtrace_last_exn = exn; Caml_state->backtrace_last_exn = exn;
} }
if (caml_backtrace_buffer == NULL && caml_alloc_backtrace_buffer() == -1) if (Caml_state->backtrace_buffer == NULL &&
caml_alloc_backtrace_buffer() == -1)
return; return;
/* iterate on each frame */ /* iterate on each frame */
@ -94,8 +95,9 @@ void caml_stash_backtrace(value exn, uintnat pc, char * sp, char * trapsp)
frame_descr * descr = caml_next_frame_descriptor(&pc, &sp); frame_descr * descr = caml_next_frame_descriptor(&pc, &sp);
if (descr == NULL) return; if (descr == NULL) return;
/* store its descriptor in the backtrace buffer */ /* store its descriptor in the backtrace buffer */
if (caml_backtrace_pos >= BACKTRACE_BUFFER_SIZE) return; if (Caml_state->backtrace_pos >= BACKTRACE_BUFFER_SIZE) return;
caml_backtrace_buffer[caml_backtrace_pos++] = (backtrace_slot) descr; Caml_state->backtrace_buffer[Caml_state->backtrace_pos++] =
(backtrace_slot) descr;
/* Stop when we reach the current exception handler */ /* Stop when we reach the current exception handler */
if (sp > trapsp) return; if (sp > trapsp) return;
@ -104,8 +106,8 @@ void caml_stash_backtrace(value exn, uintnat pc, char * sp, char * trapsp)
intnat caml_current_callstack_size(intnat max_frames) { intnat caml_current_callstack_size(intnat max_frames) {
intnat trace_size = 0; intnat trace_size = 0;
uintnat pc = caml_last_return_address; uintnat pc = Caml_state->last_return_address;
char * sp = caml_bottom_of_stack; char * sp = Caml_state->bottom_of_stack;
while (1) { while (1) {
frame_descr * descr = caml_next_frame_descriptor(&pc, &sp); frame_descr * descr = caml_next_frame_descriptor(&pc, &sp);
@ -113,15 +115,15 @@ intnat caml_current_callstack_size(intnat max_frames) {
if (trace_size >= max_frames) break; if (trace_size >= max_frames) break;
++trace_size; ++trace_size;
if (sp > caml_top_of_stack) break; if (sp > Caml_state->top_of_stack) break;
} }
return trace_size; return trace_size;
} }
void caml_current_callstack_write(value trace) { void caml_current_callstack_write(value trace) {
uintnat pc = caml_last_return_address; uintnat pc = Caml_state->last_return_address;
char * sp = caml_bottom_of_stack; char * sp = Caml_state->bottom_of_stack;
intnat trace_pos, trace_size = Wosize_val(trace); intnat trace_pos, trace_size = Wosize_val(trace);
for (trace_pos = 0; trace_pos < trace_size; trace_pos++) { for (trace_pos = 0; trace_pos < trace_size; trace_pos++) {

View File

@ -209,7 +209,7 @@ CAMLexport int caml_ba_compare(value v1, value v2)
if (e1 < e2) return -1; \ if (e1 < e2) return -1; \
if (e1 > e2) return 1; \ if (e1 > e2) return 1; \
if (e1 != e2) { \ if (e1 != e2) { \
caml_compare_unordered = 1; \ Caml_state->compare_unordered = 1; \
if (e1 == e1) return 1; \ if (e1 == e1) return 1; \
if (e2 == e2) return -1; \ if (e2 == e2) return -1; \
} \ } \

View File

@ -19,6 +19,7 @@
#include <string.h> #include <string.h>
#include "caml/callback.h" #include "caml/callback.h"
#include "caml/domain.h"
#include "caml/fail.h" #include "caml/fail.h"
#include "caml/memory.h" #include "caml/memory.h"
#include "caml/mlvalues.h" #include "caml/mlvalues.h"
@ -71,22 +72,23 @@ CAMLexport value caml_callbackN_exn(value closure, int narg, value args[])
CAMLassert(narg + 4 <= 256); CAMLassert(narg + 4 <= 256);
caml_extern_sp -= narg + 4; Caml_state->extern_sp -= narg + 4;
for (i = 0; i < narg; i++) caml_extern_sp[i] = args[i]; /* arguments */ for (i = 0; i < narg; i++) Caml_state->extern_sp[i] = args[i]; /* arguments */
#ifndef LOCAL_CALLBACK_BYTECODE #ifndef LOCAL_CALLBACK_BYTECODE
caml_extern_sp[narg] = (value) (callback_code + 4); /* return address */ Caml_state->extern_sp[narg] = (value)(callback_code + 4); /* return address */
caml_extern_sp[narg + 1] = Val_unit; /* environment */ Caml_state->extern_sp[narg + 1] = Val_unit; /* environment */
caml_extern_sp[narg + 2] = Val_long(0); /* extra args */ Caml_state->extern_sp[narg + 2] = Val_long(0); /* extra args */
caml_extern_sp[narg + 3] = closure; Caml_state->extern_sp[narg + 3] = closure;
Init_callback(); Init_callback();
callback_code[1] = narg + 3; callback_code[1] = narg + 3;
callback_code[3] = narg; callback_code[3] = narg;
res = caml_interprete(callback_code, sizeof(callback_code)); res = caml_interprete(callback_code, sizeof(callback_code));
#else /*have LOCAL_CALLBACK_BYTECODE*/ #else /*have LOCAL_CALLBACK_BYTECODE*/
caml_extern_sp[narg] = (value) (local_callback_code + 4); /* return address */ /* return address */
caml_extern_sp[narg + 1] = Val_unit; /* environment */ Caml_state->extern_sp[narg] = (value) (local_callback_code + 4);
caml_extern_sp[narg + 2] = Val_long(0); /* extra args */ Caml_state->extern_sp[narg + 1] = Val_unit; /* environment */
caml_extern_sp[narg + 3] = closure; Caml_state->extern_sp[narg + 2] = Val_long(0); /* extra args */
Caml_state->extern_sp[narg + 3] = closure;
local_callback_code[0] = ACC; local_callback_code[0] = ACC;
local_callback_code[1] = narg + 3; local_callback_code[1] = narg + 3;
local_callback_code[2] = APPLY; local_callback_code[2] = APPLY;
@ -100,7 +102,7 @@ CAMLexport value caml_callbackN_exn(value closure, int narg, value args[])
res = caml_interprete(local_callback_code, sizeof(local_callback_code)); res = caml_interprete(local_callback_code, sizeof(local_callback_code));
caml_release_bytecode(local_callback_code, sizeof(local_callback_code)); caml_release_bytecode(local_callback_code, sizeof(local_callback_code));
#endif /*LOCAL_CALLBACK_BYTECODE*/ #endif /*LOCAL_CALLBACK_BYTECODE*/
if (Is_exception_result(res)) caml_extern_sp += narg + 4; /* PR#3419 */ if (Is_exception_result(res)) Caml_state->extern_sp += narg + 4; /* PR#3419 */
return res; return res;
} }
@ -131,7 +133,31 @@ CAMLexport value caml_callback3_exn(value closure,
#else #else
/* Native-code callbacks. caml_callback[123]_exn are implemented in asm. */ /* Native-code callbacks. */
typedef value (callback_stub)(caml_domain_state* state, value closure,
value* args);
callback_stub caml_callback_asm, caml_callback2_asm, caml_callback3_asm;
CAMLexport value caml_callback_exn(value closure, value arg)
{
return caml_callback_asm(Caml_state, closure, &arg);
}
CAMLexport value caml_callback2_exn(value closure, value arg1, value arg2)
{
value args[] = {arg1, arg2};
return caml_callback2_asm(Caml_state, closure, args);
}
CAMLexport value caml_callback3_exn(value closure,
value arg1, value arg2, value arg3)
{
value args[] = {arg1, arg2, arg3};
return caml_callback3_asm(Caml_state, closure, args);
}
CAMLexport value caml_callbackN_exn(value closure, int narg, value args[]) CAMLexport value caml_callbackN_exn(value closure, int narg, value args[])
{ {
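As a usage reminder (not introduced by this patch): C code reaches these entry points through caml_callback_exn and friends; only the implementation changes here, from per-arity assembly entry points to the caml_callback*_asm trampolines that take Caml_state explicitly. A minimal sketch, assuming an OCaml function was registered under the hypothetical name "my_handler" with Callback.register:

    #include <caml/mlvalues.h>
    #include <caml/callback.h>

    /* Hypothetical helper: invoke the registered OCaml handler on [n]. */
    void call_my_handler(long n)
    {
      static const value *handler = NULL;
      if (handler == NULL) handler = caml_named_value("my_handler");
      if (handler != NULL) {
        value res = caml_callback_exn(*handler, Val_long(n));
        if (Is_exception_result(res)) {
          /* Extract_exception(res) would give the exception value;
             this sketch simply drops it. */
        }
      }
    }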

View File

@ -27,7 +27,8 @@
#define Is_young(val) \ #define Is_young(val) \
(CAMLassert (Is_block (val)), \ (CAMLassert (Is_block (val)), \
(addr)(val) < (addr)caml_young_end && (addr)(val) > (addr)caml_young_start) (addr)(val) < (addr)Caml_state_field(young_end) && \
(addr)(val) > (addr)Caml_state_field(young_start))
#define Is_in_heap(a) (Classify_addr(a) & In_heap) #define Is_in_heap(a) (Classify_addr(a) & In_heap)
@ -46,7 +47,6 @@
/***********************************************************************/ /***********************************************************************/
/* The rest of this file is private and may change without notice. */ /* The rest of this file is private and may change without notice. */
extern value *caml_young_start, *caml_young_end;
extern char * caml_code_area_start, * caml_code_area_end; extern char * caml_code_area_start, * caml_code_area_end;
#define Not_in_heap 0 #define Not_in_heap 0
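The change above only redirects the bounds consulted by Is_young; the shape of the test is unchanged. A sketch of the expansion after preprocessing, assuming CAML_NAME_SPACE is defined so that Caml_state_field(f) is Caml_state->f:

    /* Is_young(val) expands to roughly: */
    (CAMLassert (Is_block (val)),
     (addr)(val) < (addr)(Caml_state->young_end) &&
     (addr)(val) > (addr)(Caml_state->young_start))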

View File

@ -30,63 +30,49 @@
* *
* Backtrace generation is split in multiple steps. * Backtrace generation is split in multiple steps.
* The lowest-level one, done by [backtrace_byt.c] and * The lowest-level one, done by [backtrace_byt.c] and
* [backtrace_nat.c] just fills the [caml_backtrace_buffer] * [backtrace_nat.c] just fills the [Caml_state->backtrace_buffer]
* variable each time a frame is unwinded. * variable each time a frame is unwinded.
* At that point, we don't know whether the backtrace will be useful or not so * At that point, we don't know whether the backtrace will be useful or not so
* this code should be as fast as possible. * this code should be as fast as possible.
* *
* If the backtrace happens to be useful, later passes will read * If the backtrace happens to be useful, later passes will read
* [caml_backtrace_buffer] and turn it into a [raw_backtrace] and then a * [Caml_state->backtrace_buffer] and turn it into a [raw_backtrace] and then a
* [backtrace]. * [backtrace].
* This is done in [backtrace.c] and [stdlib/printexc.ml]. * This is done in [backtrace.c] and [stdlib/printexc.ml].
* *
* Content of buffers * Content of buffers
* ------------------ * ------------------
* *
* [caml_backtrace_buffer] (really cheap) * [Caml_state->backtrace_buffer] (really cheap)
* Backend and process image dependent, abstracted by C-type backtrace_slot. * Backend and process image dependent, abstracted by C-type backtrace_slot.
* [raw_backtrace] (cheap) * [raw_backtrace] (cheap)
* OCaml values of abstract type [Printexc.raw_backtrace_slot], * OCaml values of abstract type [Printexc.raw_backtrace_slot],
* still backend and process image dependent (unsafe to marshal). * still backend and process image dependent (unsafe to marshal).
* [backtrace] (more expensive) * [backtrace] (more expensive)
* OCaml values of algebraic data-type [Printexc.backtrace_slot] * OCaml values of algebraic data-type [Printexc.backtrace_slot]
*/ *
* [Caml_state->backtrace_active] is non zero iff backtraces are recorded.
/* Non zero iff backtraces are recorded. * This variable must be changed with [caml_record_backtrace].
* One should use to change this variable [caml_record_backtrace]. *
*/ * The [Caml_state->backtrace_buffer] and [Caml_state->backtrace_last_exn]
CAMLextern int caml_backtrace_active; * variables are valid only if [Caml_state->backtrace_active != 0].
/* The [backtrace_slot] type represents values stored in the
* [caml_backtrace_buffer]. In bytecode, it is the same as a
* [code_t], in native code it as a [frame_descr *]. The difference
* doesn't matter for code outside [backtrace_{byt,nat}.c],
* so it is just exposed as a [backtrace_slot].
*/
typedef void * backtrace_slot;
/* The [caml_backtrace_buffer] and [caml_backtrace_last_exn]
* variables are valid only if [caml_backtrace_active != 0].
* *
* They are part of the state specific to each thread, and threading libraries * They are part of the state specific to each thread, and threading libraries
* are responsible for copying them on context switch. * are responsible for copying them on context switch.
* See [otherlibs/systhreads/st_stubs.c] and [otherlibs/threads/scheduler.c]. * See [otherlibs/systhreads/st_stubs.c] and [otherlibs/threads/scheduler.c].
*/ *
*
/* [caml_backtrace_buffer] is filled by runtime when unwinding stack. * [Caml_state->backtrace_buffer] is filled by runtime when unwinding stack. It
* It is an array ranging from [0] to [caml_backtrace_pos - 1]. * is an array ranging from [0] to [Caml_state->backtrace_pos - 1].
* [caml_backtrace_pos] is always zero if [!caml_backtrace_active]. * [Caml_state->backtrace_pos] is always zero if
* [!Caml_state->backtrace_active].
* *
* Its maximum size is determined by [BACKTRACE_BUFFER_SIZE] from * Its maximum size is determined by [BACKTRACE_BUFFER_SIZE] from
* [backtrace_prim.h], but this shouldn't affect users. * [backtrace_prim.h], but this shouldn't affect users.
*/ *
CAMLextern backtrace_slot * caml_backtrace_buffer; * [Caml_state->backtrace_last_exn] stores the last exception value that was
CAMLextern int caml_backtrace_pos; * raised, iff [Caml_state->backtrace_active != 0]. It is tested for equality
* to determine whether a raise is a re-raise of the same exception.
/* [caml_backtrace_last_exn] stores the last exception value that was raised,
* iff [caml_backtrace_active != 0].
* It is tested for equality to determine whether a raise is a re-raise of the
* same exception.
* *
* FIXME: this shouldn't matter anymore. Since OCaml 4.02, non-parameterized * FIXME: this shouldn't matter anymore. Since OCaml 4.02, non-parameterized
* exceptions are constant, so physical equality is no longer appropriate. * exceptions are constant, so physical equality is no longer appropriate.
@ -95,7 +81,6 @@ CAMLextern int caml_backtrace_pos;
* interpreter; * interpreter;
* - directly resetting [caml_backtrace_pos] to 0 in native runtimes for raise. * - directly resetting [caml_backtrace_pos] to 0 in native runtimes for raise.
*/ */
CAMLextern value caml_backtrace_last_exn;
/* [caml_record_backtrace] toggle backtrace recording on and off. /* [caml_record_backtrace] toggle backtrace recording on and off.
* This function can be called at runtime by user-code, or during * This function can be called at runtime by user-code, or during

View File

@ -22,6 +22,13 @@
#define caml_stat_top_heap_size Bsize_wsize(caml_stat_top_heap_wsz) #define caml_stat_top_heap_size Bsize_wsize(caml_stat_top_heap_wsz)
#define caml_stat_heap_size Bsize_wsize(caml_stat_heap_wsz) #define caml_stat_heap_size Bsize_wsize(caml_stat_heap_wsz)
/* global variables moved to Caml_state in 4.10.0 */
#define caml_young_start (Caml_state->young_start)
#define caml_young_end (Caml_state->young_end)
#define caml_young_ptr (Caml_state->young_ptr)
#define caml_young_limit (Caml_state->young_limit)
#define caml_local_roots (Caml_state->local_roots)
#ifndef CAML_NAME_SPACE #ifndef CAML_NAME_SPACE
/* /*
@ -235,10 +242,10 @@
/* **** meta.c */ /* **** meta.c */
/* **** minor_gc.c */ /* **** minor_gc.c */
#define young_start caml_young_start #define young_start (Caml_state->_young_start)
#define young_end caml_young_end #define young_end (Caml_state->_young_end)
#define young_ptr caml_young_ptr #define young_ptr (Caml_state->_young_ptr)
#define young_limit caml_young_limit #define young_limit (Caml_state->_young_limit)
#define ref_table caml_ref_table #define ref_table caml_ref_table
#define minor_collection caml_minor_collection #define minor_collection caml_minor_collection
#define check_urgent_gc caml_check_urgent_gc #define check_urgent_gc caml_check_urgent_gc
@ -255,7 +262,7 @@
#define format_caml_exception caml_format_exception /*SP*/ #define format_caml_exception caml_format_exception /*SP*/
/* **** roots.c */ /* **** roots.c */
#define local_roots caml_local_roots #define local_roots (Caml_state->_local_roots)
#define scan_roots_hook caml_scan_roots_hook #define scan_roots_hook caml_scan_roots_hook
#define do_local_roots caml_do_local_roots #define do_local_roots caml_do_local_roots
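The practical effect of these defines is source compatibility: C code written against the old globals keeps compiling unchanged, whether it used the caml_ prefix or the historical unprefixed names, and now resolves through the macros above to fields of Caml_state. A sketch of a legacy fragment that still compiles as-is (purely illustrative):

    /* The identifiers below are now macros over Caml_state, not globals. */
    if (caml_young_ptr - Whsize_wosize (wosize) < caml_young_limit)
      caml_minor_collection ();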

View File

@ -47,6 +47,8 @@
#include "compatibility.h" #include "compatibility.h"
#endif #endif
#ifndef CAML_CONFIG_H_NO_TYPEDEFS
#include <stddef.h> #include <stddef.h>
#if defined(HAS_LOCALE_H) || defined(HAS_XLOCALE_H) #if defined(HAS_LOCALE_H) || defined(HAS_XLOCALE_H)
@ -139,6 +141,8 @@ typedef uint64_t uintnat;
#error "No integer type available to represent pointers" #error "No integer type available to represent pointers"
#endif #endif
#endif /* CAML_CONFIG_H_NO_TYPEDEFS */
/* Endianness of floats */ /* Endianness of floats */
/* ARCH_FLOAT_ENDIANNESS encodes the byte order of doubles as follows: /* ARCH_FLOAT_ENDIANNESS encodes the byte order of doubles as follows:

36
runtime/caml/domain.h Normal file
View File

@ -0,0 +1,36 @@
/**************************************************************************/
/* */
/* OCaml */
/* */
/* KC Sivaramakrishnan, Indian Institute of Technology, Madras */
/* Stephen Dolan, University of Cambridge */
/* */
/* Copyright 2019 Indian Institute of Technology, Madras */
/* Copyright 2019 University of Cambridge */
/* */
/* All rights reserved. This file is distributed under the terms of */
/* the GNU Lesser General Public License version 2.1, with the */
/* special exception on linking described in the file LICENSE. */
/* */
/**************************************************************************/
#ifndef CAML_DOMAIN_H
#define CAML_DOMAIN_H
#ifdef __cplusplus
extern "C" {
#endif
#ifdef CAML_INTERNALS
#include "domain_state.h"
void caml_init_domain(void);
#endif /* CAML_INTERNALS */
#ifdef __cplusplus
}
#endif
#endif /* CAML_DOMAIN_H */

View File

@ -0,0 +1,58 @@
/**************************************************************************/
/* */
/* OCaml */
/* */
/* KC Sivaramakrishnan, Indian Institute of Technology, Madras */
/* Stephen Dolan, University of Cambridge */
/* */
/* Copyright 2019 Indian Institute of Technology, Madras */
/* Copyright 2019 University of Cambridge */
/* */
/* All rights reserved. This file is distributed under the terms of */
/* the GNU Lesser General Public License version 2.1, with the */
/* special exception on linking described in the file LICENSE. */
/* */
/**************************************************************************/
#ifndef CAML_STATE_H
#define CAML_STATE_H
#include <stddef.h>
#include "misc.h"
#include "mlvalues.h"
/* This structure sits in the TLS area and is also accessed efficiently
* via native code, which is why the indices are important */
typedef struct {
#ifdef CAML_NAME_SPACE
#define DOMAIN_STATE(type, name) CAMLalign(8) type name;
#else
#define DOMAIN_STATE(type, name) CAMLalign(8) type _##name;
#endif
#include "domain_state.tbl"
#undef DOMAIN_STATE
} caml_domain_state;
enum {
Domain_state_num_fields =
#define DOMAIN_STATE(type, name) + 1
#include "domain_state.tbl"
#undef DOMAIN_STATE
};
/* Check that the structure was laid out without padding,
since the runtime assumes this in computing offsets */
CAML_STATIC_ASSERT(
sizeof(caml_domain_state) ==
(Domain_state_num_fields
) * 8);
CAMLextern caml_domain_state* Caml_state;
#ifdef CAML_NAME_SPACE
#define Caml_state_field(field) Caml_state->field
#else
#define Caml_state_field(field) Caml_state->_##field
#endif
#endif /* CAML_STATE_H */
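To make the X-macro construction concrete: each DOMAIN_STATE(type, name) line of domain_state.tbl is expanded twice, once into a struct field and once into the field count used by the static assertion. A sketch of the result for the first two table entries, assuming CAML_NAME_SPACE is defined:

    typedef struct {
      CAMLalign(8) value* young_ptr;    /* from DOMAIN_STATE(value*, young_ptr)   */
      CAMLalign(8) value* young_limit;  /* from DOMAIN_STATE(value*, young_limit) */
      /* ... one 8-byte-aligned field per line of domain_state.tbl ... */
    } caml_domain_state;

    enum {
      Domain_state_num_fields = /* +1 per table line */ + 1 + 1 /* ... */
    };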

View File

@ -0,0 +1,75 @@
/**************************************************************************/
/* */
/* OCaml */
/* */
/* KC Sivaramakrishnan, Indian Institute of Technology, Madras */
/* Stephen Dolan, University of Cambridge */
/* */
/* Copyright 2019 Indian Institute of Technology, Madras */
/* Copyright 2019 University of Cambridge */
/* */
/* All rights reserved. This file is distributed under the terms of */
/* the GNU Lesser General Public License version 2.1, with the */
/* special exception on linking described in the file LICENSE. */
/* */
/**************************************************************************/
DOMAIN_STATE(value*, young_ptr)
DOMAIN_STATE(value*, young_limit)
/* Minor heap limit. See minor_gc.c. */
DOMAIN_STATE(char*, exception_pointer)
/* Exception pointer that points into the current stack */
DOMAIN_STATE(void*, young_base)
DOMAIN_STATE(value*, young_start)
DOMAIN_STATE(value*, young_end)
DOMAIN_STATE(value*, young_alloc_start)
DOMAIN_STATE(value*, young_alloc_end)
DOMAIN_STATE(value*, young_alloc_mid)
DOMAIN_STATE(value*, young_trigger)
DOMAIN_STATE(asize_t, minor_heap_wsz)
DOMAIN_STATE(intnat, in_minor_collection)
DOMAIN_STATE(double, extra_heap_resources_minor)
DOMAIN_STATE(struct caml_ref_table*, ref_table)
DOMAIN_STATE(struct caml_ephe_ref_table*, ephe_ref_table)
DOMAIN_STATE(struct caml_custom_table*, custom_table)
/* See minor_gc.c */
DOMAIN_STATE(value*, stack_low)
DOMAIN_STATE(value*, stack_high)
DOMAIN_STATE(value*, stack_threshold)
DOMAIN_STATE(value*, extern_sp)
DOMAIN_STATE(value*, trapsp)
DOMAIN_STATE(value*, trap_barrier)
DOMAIN_STATE(struct longjmp_buffer*, external_raise)
DOMAIN_STATE(value, exn_bucket)
/* See interp.c */
DOMAIN_STATE(char*, top_of_stack)
DOMAIN_STATE(char*, bottom_of_stack)
DOMAIN_STATE(uintnat, last_return_address)
DOMAIN_STATE(value*, gc_regs)
/* See roots_nat.c */
DOMAIN_STATE(intnat, backtrace_active)
DOMAIN_STATE(intnat, backtrace_pos)
DOMAIN_STATE(backtrace_slot*, backtrace_buffer)
DOMAIN_STATE(value, backtrace_last_exn)
/* See backtrace.c */
DOMAIN_STATE(intnat, compare_unordered)
DOMAIN_STATE(intnat, requested_major_slice)
DOMAIN_STATE(intnat, requested_minor_gc)
DOMAIN_STATE(struct caml__roots_block *, local_roots)
DOMAIN_STATE(double, stat_minor_words)
DOMAIN_STATE(double, stat_promoted_words)
DOMAIN_STATE(double, stat_major_words)
DOMAIN_STATE(intnat, stat_minor_collections)
DOMAIN_STATE(intnat, stat_major_collections)
DOMAIN_STATE(intnat, stat_heap_wsz)
DOMAIN_STATE(intnat, stat_top_heap_wsz)
DOMAIN_STATE(intnat, stat_compactions)
DOMAIN_STATE(intnat, stat_heap_chunks)
/* See gc_ctrl.c */
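Because the static assertion in domain_state.h guarantees a packed layout of 8-byte slots, the byte offset of each field is simply 8 times its position in this table; that is what lets generated assembly (and utils/domainstate in the compiler) address Caml_state fields by constant offsets. A sketch of the invariant, assuming CAML_NAME_SPACE is defined so the fields keep the names shown here:

    #include <stddef.h>
    #include "caml/domain_state.h"

    /* young_ptr is entry 0, young_limit entry 1, exception_pointer entry 2. */
    CAML_STATIC_ASSERT(offsetof(caml_domain_state, young_ptr) == 0 * 8);
    CAML_STATIC_ASSERT(offsetof(caml_domain_state, young_limit) == 1 * 8);
    CAML_STATIC_ASSERT(offsetof(caml_domain_state, exception_pointer) == 2 * 8);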

View File

@ -59,8 +59,6 @@ struct longjmp_buffer {
#define siglongjmp(buf,val) longjmp(buf,val) #define siglongjmp(buf,val) longjmp(buf,val)
#endif #endif
CAMLextern struct longjmp_buffer * caml_external_raise;
extern value caml_exn_bucket;
int caml_is_special_exception(value exn); int caml_is_special_exception(value exn);
#endif /* CAML_INTERNALS */ #endif /* CAML_INTERNALS */

View File

@ -20,19 +20,6 @@
#include "misc.h" #include "misc.h"
extern double
caml_stat_minor_words,
caml_stat_promoted_words,
caml_stat_major_words;
extern intnat
caml_stat_minor_collections,
caml_stat_major_collections,
caml_stat_heap_wsz,
caml_stat_top_heap_wsz,
caml_stat_compactions,
caml_stat_heap_chunks;
uintnat caml_normalize_heap_increment (uintnat); uintnat caml_normalize_heap_increment (uintnat);
/* /*

View File

@ -18,7 +18,7 @@
#ifndef CAML_MEMORY_H #ifndef CAML_MEMORY_H
#define CAML_MEMORY_H #define CAML_MEMORY_H
#ifndef CAML_NAME_SPACE #ifndef CAML_INTERNALS
#include "compatibility.h" #include "compatibility.h"
#endif #endif
#include "config.h" #include "config.h"
@ -30,12 +30,12 @@
#endif /* CAML_INTERNALS */ #endif /* CAML_INTERNALS */
#include "misc.h" #include "misc.h"
#include "mlvalues.h" #include "mlvalues.h"
#include "domain.h"
#ifdef __cplusplus #ifdef __cplusplus
extern "C" { extern "C" {
#endif #endif
CAMLextern value caml_alloc_shr (mlsize_t wosize, tag_t); CAMLextern value caml_alloc_shr (mlsize_t wosize, tag_t);
#ifdef WITH_PROFINFO #ifdef WITH_PROFINFO
CAMLextern value caml_alloc_shr_with_profinfo (mlsize_t, tag_t, intnat); CAMLextern value caml_alloc_shr_with_profinfo (mlsize_t, tag_t, intnat);
@ -222,16 +222,16 @@ extern void caml_alloc_small_dispatch (tag_t tag, intnat wosize, int flags);
CAMLassert ((wosize) >= 1); \ CAMLassert ((wosize) >= 1); \
CAMLassert ((tag_t) (tag) < 256); \ CAMLassert ((tag_t) (tag) < 256); \
CAMLassert ((wosize) <= Max_young_wosize); \ CAMLassert ((wosize) <= Max_young_wosize); \
caml_young_ptr -= Whsize_wosize (wosize); \ Caml_state_field(young_ptr) -= Whsize_wosize (wosize); \
if (caml_young_ptr < caml_young_limit) { \ if (Caml_state_field(young_ptr) < Caml_state_field(young_limit)) { \
Setup_for_gc; \ Setup_for_gc; \
caml_alloc_small_dispatch((tag), (wosize), \ caml_alloc_small_dispatch((tag), (wosize), \
(track) | Alloc_small_origin); \ (track) | Alloc_small_origin); \
Restore_after_gc; \ Restore_after_gc; \
} \ } \
Hd_hp (caml_young_ptr) = \ Hd_hp (Caml_state_field(young_ptr)) = \
Make_header_with_profinfo ((wosize), (tag), Caml_black, profinfo); \ Make_header_with_profinfo ((wosize), (tag), Caml_black, profinfo); \
(result) = Val_hp (caml_young_ptr); \ (result) = Val_hp (Caml_state_field(young_ptr)); \
DEBUG_clear ((result), (wosize)); \ DEBUG_clear ((result), (wosize)); \
}while(0) }while(0)
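Alloc_small_with_profinfo above is the fast inline path used inside the runtime; C stubs outside the runtime reach the same bump allocation through caml_alloc_small. A minimal sketch — the function name is hypothetical:

    #include <caml/mlvalues.h>
    #include <caml/memory.h>
    #include <caml/alloc.h>

    /* Hypothetical stub: build the pair (a, b). caml_alloc_small bumps
       Caml_state->young_ptr just like the macro above, dispatching to the
       GC when young_ptr would cross young_limit. */
    CAMLprim value example_make_pair(value a, value b)
    {
      CAMLparam2(a, b);
      value pair = caml_alloc_small(2, 0);
      Field(pair, 0) = a;   /* direct initialization is fine: the block is young */
      Field(pair, 1) = b;
      CAMLreturn(pair);
    }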
@ -271,8 +271,6 @@ struct caml__roots_block {
value *tables [5]; value *tables [5];
}; };
CAMLextern struct caml__roots_block *caml_local_roots; /* defined in roots.c */
/* The following macros are used to declare C local variables and /* The following macros are used to declare C local variables and
function parameters of type [value]. function parameters of type [value].
@ -305,7 +303,7 @@ CAMLextern struct caml__roots_block *caml_local_roots; /* defined in roots.c */
*/ */
#define CAMLparam0() \ #define CAMLparam0() \
struct caml__roots_block *caml__frame = caml_local_roots struct caml__roots_block *caml__frame = Caml_state_field(local_roots)
#define CAMLparam1(x) \ #define CAMLparam1(x) \
CAMLparam0 (); \ CAMLparam0 (); \
@ -357,8 +355,8 @@ CAMLextern struct caml__roots_block *caml_local_roots; /* defined in roots.c */
struct caml__roots_block caml__roots_##x; \ struct caml__roots_block caml__roots_##x; \
CAMLunused_start int caml__dummy_##x = ( \ CAMLunused_start int caml__dummy_##x = ( \
(void) caml__frame, \ (void) caml__frame, \
(caml__roots_##x.next = caml_local_roots), \ (caml__roots_##x.next = Caml_state_field(local_roots)), \
(caml_local_roots = &caml__roots_##x), \ (Caml_state_field(local_roots) = &caml__roots_##x), \
(caml__roots_##x.nitems = 1), \ (caml__roots_##x.nitems = 1), \
(caml__roots_##x.ntables = 1), \ (caml__roots_##x.ntables = 1), \
(caml__roots_##x.tables [0] = &x), \ (caml__roots_##x.tables [0] = &x), \
@ -369,8 +367,8 @@ CAMLextern struct caml__roots_block *caml_local_roots; /* defined in roots.c */
struct caml__roots_block caml__roots_##x; \ struct caml__roots_block caml__roots_##x; \
CAMLunused_start int caml__dummy_##x = ( \ CAMLunused_start int caml__dummy_##x = ( \
(void) caml__frame, \ (void) caml__frame, \
(caml__roots_##x.next = caml_local_roots), \ (caml__roots_##x.next = Caml_state_field(local_roots)), \
(caml_local_roots = &caml__roots_##x), \ (Caml_state_field(local_roots) = &caml__roots_##x), \
(caml__roots_##x.nitems = 1), \ (caml__roots_##x.nitems = 1), \
(caml__roots_##x.ntables = 2), \ (caml__roots_##x.ntables = 2), \
(caml__roots_##x.tables [0] = &x), \ (caml__roots_##x.tables [0] = &x), \
@ -382,8 +380,8 @@ CAMLextern struct caml__roots_block *caml_local_roots; /* defined in roots.c */
struct caml__roots_block caml__roots_##x; \ struct caml__roots_block caml__roots_##x; \
CAMLunused_start int caml__dummy_##x = ( \ CAMLunused_start int caml__dummy_##x = ( \
(void) caml__frame, \ (void) caml__frame, \
(caml__roots_##x.next = caml_local_roots), \ (caml__roots_##x.next = Caml_state_field(local_roots)), \
(caml_local_roots = &caml__roots_##x), \ (Caml_state_field(local_roots) = &caml__roots_##x), \
(caml__roots_##x.nitems = 1), \ (caml__roots_##x.nitems = 1), \
(caml__roots_##x.ntables = 3), \ (caml__roots_##x.ntables = 3), \
(caml__roots_##x.tables [0] = &x), \ (caml__roots_##x.tables [0] = &x), \
@ -396,8 +394,8 @@ CAMLextern struct caml__roots_block *caml_local_roots; /* defined in roots.c */
struct caml__roots_block caml__roots_##x; \ struct caml__roots_block caml__roots_##x; \
CAMLunused_start int caml__dummy_##x = ( \ CAMLunused_start int caml__dummy_##x = ( \
(void) caml__frame, \ (void) caml__frame, \
(caml__roots_##x.next = caml_local_roots), \ (caml__roots_##x.next = Caml_state_field(local_roots)), \
(caml_local_roots = &caml__roots_##x), \ (Caml_state_field(local_roots) = &caml__roots_##x), \
(caml__roots_##x.nitems = 1), \ (caml__roots_##x.nitems = 1), \
(caml__roots_##x.ntables = 4), \ (caml__roots_##x.ntables = 4), \
(caml__roots_##x.tables [0] = &x), \ (caml__roots_##x.tables [0] = &x), \
@ -411,8 +409,8 @@ CAMLextern struct caml__roots_block *caml_local_roots; /* defined in roots.c */
struct caml__roots_block caml__roots_##x; \ struct caml__roots_block caml__roots_##x; \
CAMLunused_start int caml__dummy_##x = ( \ CAMLunused_start int caml__dummy_##x = ( \
(void) caml__frame, \ (void) caml__frame, \
(caml__roots_##x.next = caml_local_roots), \ (caml__roots_##x.next = Caml_state_field(local_roots)), \
(caml_local_roots = &caml__roots_##x), \ (Caml_state_field(local_roots) = &caml__roots_##x), \
(caml__roots_##x.nitems = 1), \ (caml__roots_##x.nitems = 1), \
(caml__roots_##x.ntables = 5), \ (caml__roots_##x.ntables = 5), \
(caml__roots_##x.tables [0] = &x), \ (caml__roots_##x.tables [0] = &x), \
@ -427,8 +425,8 @@ CAMLextern struct caml__roots_block *caml_local_roots; /* defined in roots.c */
struct caml__roots_block caml__roots_##x; \ struct caml__roots_block caml__roots_##x; \
CAMLunused_start int caml__dummy_##x = ( \ CAMLunused_start int caml__dummy_##x = ( \
(void) caml__frame, \ (void) caml__frame, \
(caml__roots_##x.next = caml_local_roots), \ (caml__roots_##x.next = Caml_state_field(local_roots)), \
(caml_local_roots = &caml__roots_##x), \ (Caml_state_field(local_roots) = &caml__roots_##x), \
(caml__roots_##x.nitems = (size)), \ (caml__roots_##x.nitems = (size)), \
(caml__roots_##x.ntables = 1), \ (caml__roots_##x.ntables = 1), \
(caml__roots_##x.tables[0] = &(x[0])), \ (caml__roots_##x.tables[0] = &(x[0])), \
@ -464,7 +462,7 @@ CAMLextern struct caml__roots_block *caml_local_roots; /* defined in roots.c */
CAMLxparamN (x, (size)) CAMLxparamN (x, (size))
#define CAMLdrop caml_local_roots = caml__frame #define CAMLdrop Caml_state_field(local_roots) = caml__frame
#define CAMLreturn0 do{ \ #define CAMLreturn0 do{ \
CAMLdrop; \ CAMLdrop; \
@ -513,16 +511,16 @@ CAMLextern struct caml__roots_block *caml_local_roots; /* defined in roots.c */
#define Begin_roots1(r0) { \ #define Begin_roots1(r0) { \
struct caml__roots_block caml__roots_block; \ struct caml__roots_block caml__roots_block; \
caml__roots_block.next = caml_local_roots; \ caml__roots_block.next = Caml_state_field(local_roots); \
caml_local_roots = &caml__roots_block; \ Caml_state_field(local_roots) = &caml__roots_block; \
caml__roots_block.nitems = 1; \ caml__roots_block.nitems = 1; \
caml__roots_block.ntables = 1; \ caml__roots_block.ntables = 1; \
caml__roots_block.tables[0] = &(r0); caml__roots_block.tables[0] = &(r0);
#define Begin_roots2(r0, r1) { \ #define Begin_roots2(r0, r1) { \
struct caml__roots_block caml__roots_block; \ struct caml__roots_block caml__roots_block; \
caml__roots_block.next = caml_local_roots; \ caml__roots_block.next = Caml_state_field(local_roots); \
caml_local_roots = &caml__roots_block; \ Caml_state_field(local_roots) = &caml__roots_block; \
caml__roots_block.nitems = 1; \ caml__roots_block.nitems = 1; \
caml__roots_block.ntables = 2; \ caml__roots_block.ntables = 2; \
caml__roots_block.tables[0] = &(r0); \ caml__roots_block.tables[0] = &(r0); \
@ -530,8 +528,8 @@ CAMLextern struct caml__roots_block *caml_local_roots; /* defined in roots.c */
#define Begin_roots3(r0, r1, r2) { \ #define Begin_roots3(r0, r1, r2) { \
struct caml__roots_block caml__roots_block; \ struct caml__roots_block caml__roots_block; \
caml__roots_block.next = caml_local_roots; \ caml__roots_block.next = Caml_state_field(local_roots); \
caml_local_roots = &caml__roots_block; \ Caml_state_field(local_roots) = &caml__roots_block; \
caml__roots_block.nitems = 1; \ caml__roots_block.nitems = 1; \
caml__roots_block.ntables = 3; \ caml__roots_block.ntables = 3; \
caml__roots_block.tables[0] = &(r0); \ caml__roots_block.tables[0] = &(r0); \
@ -540,8 +538,8 @@ CAMLextern struct caml__roots_block *caml_local_roots; /* defined in roots.c */
#define Begin_roots4(r0, r1, r2, r3) { \ #define Begin_roots4(r0, r1, r2, r3) { \
struct caml__roots_block caml__roots_block; \ struct caml__roots_block caml__roots_block; \
caml__roots_block.next = caml_local_roots; \ caml__roots_block.next = Caml_state_field(local_roots); \
caml_local_roots = &caml__roots_block; \ Caml_state_field(local_roots) = &caml__roots_block; \
caml__roots_block.nitems = 1; \ caml__roots_block.nitems = 1; \
caml__roots_block.ntables = 4; \ caml__roots_block.ntables = 4; \
caml__roots_block.tables[0] = &(r0); \ caml__roots_block.tables[0] = &(r0); \
@ -551,8 +549,8 @@ CAMLextern struct caml__roots_block *caml_local_roots; /* defined in roots.c */
#define Begin_roots5(r0, r1, r2, r3, r4) { \ #define Begin_roots5(r0, r1, r2, r3, r4) { \
struct caml__roots_block caml__roots_block; \ struct caml__roots_block caml__roots_block; \
caml__roots_block.next = caml_local_roots; \ caml__roots_block.next = Caml_state_field(local_roots); \
caml_local_roots = &caml__roots_block; \ Caml_state_field(local_roots) = &caml__roots_block; \
caml__roots_block.nitems = 1; \ caml__roots_block.nitems = 1; \
caml__roots_block.ntables = 5; \ caml__roots_block.ntables = 5; \
caml__roots_block.tables[0] = &(r0); \ caml__roots_block.tables[0] = &(r0); \
@ -563,13 +561,13 @@ CAMLextern struct caml__roots_block *caml_local_roots; /* defined in roots.c */
#define Begin_roots_block(table, size) { \ #define Begin_roots_block(table, size) { \
struct caml__roots_block caml__roots_block; \ struct caml__roots_block caml__roots_block; \
caml__roots_block.next = caml_local_roots; \ caml__roots_block.next = Caml_state_field(local_roots); \
caml_local_roots = &caml__roots_block; \ Caml_state_field(local_roots) = &caml__roots_block; \
caml__roots_block.nitems = (size); \ caml__roots_block.nitems = (size); \
caml__roots_block.ntables = 1; \ caml__roots_block.ntables = 1; \
caml__roots_block.tables[0] = (table); caml__roots_block.tables[0] = (table);
#define End_roots() caml_local_roots = caml__roots_block.next; } #define End_roots() Caml_state_field(local_roots) = caml__roots_block.next; }
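The Begin_roots*/End_roots macros are the older, non-CAMLparam way of registering local roots inside the runtime; after this change they push onto Caml_state_field(local_roots) instead of the removed global. A minimal sketch of their use (the function name is hypothetical):

    #include "caml/alloc.h"
    #include "caml/memory.h"

    static value example_make_singleton (value x)
    {
      value res;
      Begin_roots1 (x);                   /* x is now reachable from Caml_state->local_roots */
        res = caml_alloc_small (1, 0);    /* may trigger a GC; x stays valid */
        Field (res, 0) = x;
      End_roots ();
      return res;
    }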
/* [caml_register_global_root] registers a global C variable as a memory root /* [caml_register_global_root] registers a global C variable as a memory root

View File

@ -16,18 +16,12 @@
#ifndef CAML_MINOR_GC_H #ifndef CAML_MINOR_GC_H
#define CAML_MINOR_GC_H #define CAML_MINOR_GC_H
#ifndef CAML_INTERNALS
#include "compatibility.h"
#endif
#include "address_class.h" #include "address_class.h"
#include "config.h" #include "config.h"
CAMLextern value *caml_young_start, *caml_young_end;
CAMLextern value *caml_young_alloc_start, *caml_young_alloc_end;
CAMLextern value *caml_young_ptr, *caml_young_limit;
CAMLextern value *caml_young_trigger;
extern asize_t caml_minor_heap_wsz;
extern int caml_in_minor_collection;
extern double caml_extra_heap_resources_minor;
#define CAML_TABLE_STRUCT(t) { \ #define CAML_TABLE_STRUCT(t) { \
t *base; \ t *base; \
t *end; \ t *end; \
@ -39,7 +33,6 @@ extern double caml_extra_heap_resources_minor;
} }
struct caml_ref_table CAML_TABLE_STRUCT(value *); struct caml_ref_table CAML_TABLE_STRUCT(value *);
CAMLextern struct caml_ref_table caml_ref_table;
struct caml_ephe_ref_elt { struct caml_ephe_ref_elt {
value ephe; /* an ephemeron in major heap */ value ephe; /* an ephemeron in major heap */
@ -47,7 +40,6 @@ struct caml_ephe_ref_elt {
}; };
struct caml_ephe_ref_table CAML_TABLE_STRUCT(struct caml_ephe_ref_elt); struct caml_ephe_ref_table CAML_TABLE_STRUCT(struct caml_ephe_ref_elt);
CAMLextern struct caml_ephe_ref_table caml_ephe_ref_table;
struct caml_custom_elt { struct caml_custom_elt {
value block; /* The finalized block in the minor heap. */ value block; /* The finalized block in the minor heap. */
@ -56,12 +48,16 @@ struct caml_custom_elt {
}; };
struct caml_custom_table CAML_TABLE_STRUCT(struct caml_custom_elt); struct caml_custom_table CAML_TABLE_STRUCT(struct caml_custom_elt);
CAMLextern struct caml_custom_table caml_custom_table; /* Table of custom blocks in the minor heap that contain finalizers
or GC speed parameters. */
extern void caml_set_minor_heap_size (asize_t); /* size in bytes */ extern void caml_set_minor_heap_size (asize_t); /* size in bytes */
extern void caml_empty_minor_heap (void); extern void caml_empty_minor_heap (void);
CAMLextern void caml_gc_dispatch (void); CAMLextern void caml_gc_dispatch (void);
CAMLextern void garbage_collection (void); /* runtime/signals_nat.c */ CAMLextern void garbage_collection (void); /* runtime/signals_nat.c */
extern void caml_oldify_one (value, value *);
extern void caml_oldify_mopup (void);
extern void caml_realloc_ref_table (struct caml_ref_table *); extern void caml_realloc_ref_table (struct caml_ref_table *);
extern void caml_alloc_table (struct caml_ref_table *, asize_t, asize_t); extern void caml_alloc_table (struct caml_ref_table *, asize_t, asize_t);
extern void caml_realloc_ephe_ref_table (struct caml_ephe_ref_table *); extern void caml_realloc_ephe_ref_table (struct caml_ephe_ref_table *);
@ -70,8 +66,7 @@ extern void caml_alloc_ephe_table (struct caml_ephe_ref_table *,
extern void caml_realloc_custom_table (struct caml_custom_table *); extern void caml_realloc_custom_table (struct caml_custom_table *);
extern void caml_alloc_custom_table (struct caml_custom_table *, extern void caml_alloc_custom_table (struct caml_custom_table *,
asize_t, asize_t); asize_t, asize_t);
extern void caml_oldify_one (value, value *); void caml_alloc_minor_tables (void);
extern void caml_oldify_mopup (void);
#define Oldify(p) do{ \ #define Oldify(p) do{ \
value __oldify__v__ = *p; \ value __oldify__v__ = *p; \

View File

@ -83,6 +83,37 @@ typedef char * addr;
#define CAMLweakdef #define CAMLweakdef
#endif #endif
/* Alignment */
#ifdef __GNUC__
#define CAMLalign(n) __attribute__((aligned(n)))
#elif _MSC_VER >= 1500
#define CAMLalign(n) __declspec(align(n))
#else
#error "How do I align values on this platform?"
#endif
/* CAMLunused is preserved for compatibility reasons.
Instead of the legacy GCC/Clang-only
CAMLunused foo;
you should prefer
CAMLunused_start foo CAMLunused_end;
which supports both GCC/Clang and MSVC.
*/
#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ > 7))
#define CAMLunused_start __attribute__ ((unused))
#define CAMLunused_end
#define CAMLunused __attribute__ ((unused))
#elif _MSC_VER >= 1500
#define CAMLunused_start __pragma( warning (push) ) \
__pragma( warning (disable:4189 ) )
#define CAMLunused_end __pragma( warning (pop))
#define CAMLunused
#else
#define CAMLunused_start
#define CAMLunused_end
#define CAMLunused
#endif
#ifdef __cplusplus #ifdef __cplusplus
extern "C" { extern "C" {
#endif #endif
@ -95,6 +126,14 @@ extern caml_timing_hook caml_major_slice_begin_hook, caml_major_slice_end_hook;
extern caml_timing_hook caml_minor_gc_begin_hook, caml_minor_gc_end_hook; extern caml_timing_hook caml_minor_gc_begin_hook, caml_minor_gc_end_hook;
extern caml_timing_hook caml_finalise_begin_hook, caml_finalise_end_hook; extern caml_timing_hook caml_finalise_begin_hook, caml_finalise_end_hook;
#define CAML_STATIC_ASSERT_3(b, l) \
CAMLunused_start \
char static_assertion_failure_line_##l[(b) ? 1 : -1] \
CAMLunused_end
#define CAML_STATIC_ASSERT_2(b, l) CAML_STATIC_ASSERT_3(b, l)
#define CAML_STATIC_ASSERT(b) CAML_STATIC_ASSERT_2(b, __LINE__)
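This gives a C89-compatible compile-time assertion (the array size becomes negative on failure); domain_state.h uses it to check the layout of caml_domain_state. For illustration:

    /* Fails to compile unless an OCaml value is exactly one machine word. */
    CAML_STATIC_ASSERT(sizeof(value) == sizeof(intnat));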
/* Windows Unicode support (rest below - char_os is needed earlier) */ /* Windows Unicode support (rest below - char_os is needed earlier) */
#ifdef _WIN32 #ifdef _WIN32
@ -353,7 +392,6 @@ extern int caml_snprintf(char * buf, size_t size, const char * format, ...);
#include <time.h> #include <time.h>
#include <stdio.h> #include <stdio.h>
extern intnat caml_stat_minor_collections;
extern intnat caml_instr_starttime, caml_instr_stoptime; extern intnat caml_instr_starttime, caml_instr_stoptime;
struct caml_instr_block { struct caml_instr_block {
@ -371,15 +409,15 @@ extern struct caml_instr_block *caml_instr_log;
/* Allocate the data block for a given name. /* Allocate the data block for a given name.
[t] must have been declared with [CAML_INSTR_DECLARE]. */ [t] must have been declared with [CAML_INSTR_DECLARE]. */
#define CAML_INSTR_ALLOC(t) do{ \ #define CAML_INSTR_ALLOC(t) do{ \
if (caml_stat_minor_collections >= caml_instr_starttime \ if (Caml_state_field(stat_minor_collections) >= caml_instr_starttime \
&& caml_stat_minor_collections < caml_instr_stoptime){ \ && Caml_state_field(stat_minor_collections) < caml_instr_stoptime){ \
t = caml_stat_alloc_noexc (sizeof (struct caml_instr_block)); \ t = caml_stat_alloc_noexc (sizeof (struct caml_instr_block)); \
t->index = 0; \ t->index = 0; \
t->tag[0] = ""; \ t->tag[0] = ""; \
t->next = caml_instr_log; \ t->next = caml_instr_log; \
caml_instr_log = t; \ caml_instr_log = t; \
} \ } \
}while(0) }while(0)
/* Allocate the data block and start the timer. /* Allocate the data block and start the timer.
@ -468,6 +506,14 @@ int caml_find_code_fragment(char *pc, int *index, struct code_fragment **cf);
#endif /* CAML_INTERNALS */ #endif /* CAML_INTERNALS */
/* The [backtrace_slot] type represents values stored in the
* [caml_backtrace_buffer]. In bytecode, it is the same as a
* [code_t], in native code it as a [frame_descr *]. The difference
* doesn't matter for code outside [backtrace_{byt,nat}.c],
* so it is just exposed as a [void *].
*/
typedef void * backtrace_slot;
#ifdef __cplusplus #ifdef __cplusplus
} }
#endif #endif

View File

@ -64,6 +64,8 @@ typedef unsigned int tag_t; /* Actually, an unsigned char */
typedef uintnat color_t; typedef uintnat color_t;
typedef uintnat mark_t; typedef uintnat mark_t;
#include "domain_state.h"
/* Longs vs blocks. */ /* Longs vs blocks. */
#define Is_long(x) (((x) & 1) != 0) #define Is_long(x) (((x) & 1) != 0)
#define Is_block(x) (((x) & 1) == 0) #define Is_block(x) (((x) & 1) == 0)

View File

@ -33,8 +33,6 @@ extern "C" {
#ifdef CAML_INTERNALS #ifdef CAML_INTERNALS
CAMLextern intnat volatile caml_pending_signals[]; CAMLextern intnat volatile caml_pending_signals[];
CAMLextern int volatile caml_something_to_do; CAMLextern int volatile caml_something_to_do;
extern int volatile caml_requested_major_slice;
extern int volatile caml_requested_minor_gc;
void caml_update_young_limit(void); void caml_update_young_limit(void);
void caml_request_major_slice (void); void caml_request_major_slice (void);

View File

@ -107,11 +107,6 @@ extern uintnat caml_stack_usage (void);
extern uintnat (*caml_stack_usage_hook)(void); extern uintnat (*caml_stack_usage_hook)(void);
/* Declaration of variables used in the asm code */ /* Declaration of variables used in the asm code */
extern char * caml_top_of_stack;
extern char * caml_bottom_of_stack;
extern uintnat caml_last_return_address;
extern value * caml_gc_regs;
extern char * caml_exception_pointer;
extern value * caml_globals[]; extern value * caml_globals[];
extern char caml_globals_map[]; extern char caml_globals_map[];
extern intnat caml_globals_inited; extern intnat caml_globals_inited;

View File

@ -24,13 +24,6 @@
#include "mlvalues.h" #include "mlvalues.h"
#include "memory.h" #include "memory.h"
CAMLextern value * caml_stack_low;
CAMLextern value * caml_stack_high;
CAMLextern value * caml_stack_threshold;
CAMLextern value * caml_extern_sp;
CAMLextern value * caml_trapsp;
CAMLextern value * caml_trap_barrier;
#define Trap_pc(tp) (((code_t *)(tp))[0]) #define Trap_pc(tp) (((code_t *)(tp))[0])
#define Trap_link(tp) (((value **)(tp))[1]) #define Trap_link(tp) (((value **)(tp))[1])

View File

@ -184,7 +184,7 @@ static inline void caml_ephe_clean (value v){
}else{ }else{
Field (v, i) = child = f; Field (v, i) = child = f;
if (Is_block (f) && Is_young (f)) if (Is_block (f) && Is_young (f))
add_to_ephe_ref_table(&caml_ephe_ref_table, v, i); add_to_ephe_ref_table(Caml_state_field(ephe_ref_table), v, i);
goto ephemeron_again; goto ephemeron_again;
} }
} }

View File

@ -418,7 +418,7 @@ static void do_compaction (void)
ch = Chunk_next (ch); ch = Chunk_next (ch);
} }
} }
++ caml_stat_compactions; ++ Caml_state->stat_compactions;
caml_gc_message (0x10, "done.\n"); caml_gc_message (0x10, "done.\n");
} }
@ -429,10 +429,13 @@ void caml_compact_heap (void)
uintnat target_wsz, live; uintnat target_wsz, live;
CAML_INSTR_SETUP(tmr, "compact"); CAML_INSTR_SETUP(tmr, "compact");
CAMLassert (caml_young_ptr == caml_young_alloc_end); CAMLassert (Caml_state->young_ptr == Caml_state->young_alloc_end);
CAMLassert (caml_ref_table.ptr == caml_ref_table.base); CAMLassert (Caml_state->ref_table->ptr ==
CAMLassert (caml_ephe_ref_table.ptr == caml_ephe_ref_table.base); Caml_state->ref_table->base);
CAMLassert (caml_custom_table.ptr == caml_custom_table.base); CAMLassert (Caml_state->ephe_ref_table->ptr ==
Caml_state->ephe_ref_table->base);
CAMLassert (Caml_state->custom_table->ptr ==
Caml_state->custom_table->base);
do_compaction (); do_compaction ();
CAML_INSTR_TIME (tmr, "compact/main"); CAML_INSTR_TIME (tmr, "compact/main");
@ -461,18 +464,18 @@ void caml_compact_heap (void)
We recompact if target_wsz < heap_size / 2 We recompact if target_wsz < heap_size / 2
*/ */
live = caml_stat_heap_wsz - caml_fl_cur_wsz; live = Caml_state->stat_heap_wsz - caml_fl_cur_wsz;
target_wsz = live + caml_percent_free * (live / 100 + 1) target_wsz = live + caml_percent_free * (live / 100 + 1)
+ Wsize_bsize (Page_size); + Wsize_bsize (Page_size);
target_wsz = caml_clip_heap_chunk_wsz (target_wsz); target_wsz = caml_clip_heap_chunk_wsz (target_wsz);
#ifdef HAS_HUGE_PAGES #ifdef HAS_HUGE_PAGES
if (caml_use_huge_pages if (caml_use_huge_pages
&& Bsize_wsize (caml_stat_heap_wsz) <= HUGE_PAGE_SIZE) && Bsize_wsize (Caml_state->stat_heap_wsz) <= HUGE_PAGE_SIZE)
return; return;
#endif #endif
if (target_wsz < caml_stat_heap_wsz / 2){ if (target_wsz < Caml_state->stat_heap_wsz / 2){
/* Recompact. */ /* Recompact. */
char *chunk; char *chunk;
@ -492,15 +495,15 @@ void caml_compact_heap (void)
} }
Chunk_next (chunk) = caml_heap_start; Chunk_next (chunk) = caml_heap_start;
caml_heap_start = chunk; caml_heap_start = chunk;
++ caml_stat_heap_chunks; ++ Caml_state->stat_heap_chunks;
caml_stat_heap_wsz += Wsize_bsize (Chunk_size (chunk)); Caml_state->stat_heap_wsz += Wsize_bsize (Chunk_size (chunk));
if (caml_stat_heap_wsz > caml_stat_top_heap_wsz){ if (Caml_state->stat_heap_wsz > Caml_state->stat_top_heap_wsz){
caml_stat_top_heap_wsz = caml_stat_heap_wsz; Caml_state->stat_top_heap_wsz = Caml_state->stat_heap_wsz;
} }
do_compaction (); do_compaction ();
CAMLassert (caml_stat_heap_chunks == 1); CAMLassert (Caml_state->stat_heap_chunks == 1);
CAMLassert (Chunk_next (caml_heap_start) == NULL); CAMLassert (Chunk_next (caml_heap_start) == NULL);
CAMLassert (caml_stat_heap_wsz == Wsize_bsize (Chunk_size (chunk))); CAMLassert (Caml_state->stat_heap_wsz == Wsize_bsize (Chunk_size (chunk)));
CAML_INSTR_TIME (tmr, "compact/recompact"); CAML_INSTR_TIME (tmr, "compact/recompact");
} }
} }
@ -511,29 +514,29 @@ void caml_compact_heap_maybe (void)
FW = fl_size_at_phase_change + 3 * (caml_fl_cur_wsz FW = fl_size_at_phase_change + 3 * (caml_fl_cur_wsz
- caml_fl_wsz_at_phase_change) - caml_fl_wsz_at_phase_change)
FW = 3 * caml_fl_cur_wsz - 2 * caml_fl_wsz_at_phase_change FW = 3 * caml_fl_cur_wsz - 2 * caml_fl_wsz_at_phase_change
Estimated live words: LW = caml_stat_heap_wsz - FW Estimated live words: LW = Caml_state->stat_heap_wsz - FW
Estimated free percentage: FP = 100 * FW / LW Estimated free percentage: FP = 100 * FW / LW
We compact the heap if FP > caml_percent_max We compact the heap if FP > caml_percent_max
*/ */
double fw, fp; double fw, fp;
CAMLassert (caml_gc_phase == Phase_idle); CAMLassert (caml_gc_phase == Phase_idle);
if (caml_percent_max >= 1000000) return; if (caml_percent_max >= 1000000) return;
if (caml_stat_major_collections < 3) return; if (Caml_state->stat_major_collections < 3) return;
if (caml_stat_heap_wsz <= 2 * caml_clip_heap_chunk_wsz (0)) return; if (Caml_state->stat_heap_wsz <= 2 * caml_clip_heap_chunk_wsz (0)) return;
#ifdef HAS_HUGE_PAGES #ifdef HAS_HUGE_PAGES
if (caml_use_huge_pages if (caml_use_huge_pages
&& Bsize_wsize (caml_stat_heap_wsz) <= HUGE_PAGE_SIZE) && Bsize_wsize (Caml_state->stat_heap_wsz) <= HUGE_PAGE_SIZE)
return; return;
#endif #endif
fw = 3.0 * caml_fl_cur_wsz - 2.0 * caml_fl_wsz_at_phase_change; fw = 3.0 * caml_fl_cur_wsz - 2.0 * caml_fl_wsz_at_phase_change;
if (fw < 0) fw = caml_fl_cur_wsz; if (fw < 0) fw = caml_fl_cur_wsz;
if (fw >= caml_stat_heap_wsz){ if (fw >= Caml_state->stat_heap_wsz){
fp = 1000000.0; fp = 1000000.0;
}else{ }else{
fp = 100.0 * fw / (caml_stat_heap_wsz - fw); fp = 100.0 * fw / (Caml_state->stat_heap_wsz - fw);
if (fp > 1000000.0) fp = 1000000.0; if (fp > 1000000.0) fp = 1000000.0;
} }
caml_gc_message (0x200, "FL size at phase change = %" caml_gc_message (0x200, "FL size at phase change = %"
@ -551,7 +554,7 @@ void caml_compact_heap_maybe (void)
caml_finish_major_cycle (); caml_finish_major_cycle ();
fw = caml_fl_cur_wsz; fw = caml_fl_cur_wsz;
fp = 100.0 * fw / (caml_stat_heap_wsz - fw); fp = 100.0 * fw / (Caml_state->stat_heap_wsz - fw);
caml_gc_message (0x200, "Measured overhead: %" caml_gc_message (0x200, "Measured overhead: %"
ARCH_INTNAT_PRINTF_FORMAT "u%%\n", ARCH_INTNAT_PRINTF_FORMAT "u%%\n",
(uintnat) fp); (uintnat) fp);
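To make the heuristic above concrete with round (purely illustrative) numbers: with caml_fl_cur_wsz = 60,000 words, caml_fl_wsz_at_phase_change = 20,000 and Caml_state->stat_heap_wsz = 200,000, the estimate is FW = 3 * 60,000 - 2 * 20,000 = 140,000 free words, LW = 200,000 - 140,000 = 60,000 live words, hence FP = 100 * 140,000 / 60,000 ≈ 233%. Compaction is triggered only if FP exceeds caml_percent_max, the Gc max_overhead parameter.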

View File

@ -30,7 +30,6 @@ struct compare_item { value * v1, * v2; mlsize_t count; };
#define COMPARE_STACK_INIT_SIZE 8 #define COMPARE_STACK_INIT_SIZE 8
#define COMPARE_STACK_MIN_ALLOC_SIZE 32 #define COMPARE_STACK_MIN_ALLOC_SIZE 32
#define COMPARE_STACK_MAX_SIZE (1024*1024) #define COMPARE_STACK_MAX_SIZE (1024*1024)
CAMLexport int caml_compare_unordered;
struct compare_stack { struct compare_stack {
struct compare_item init_stack[COMPARE_STACK_INIT_SIZE]; struct compare_item init_stack[COMPARE_STACK_INIT_SIZE];
@ -140,9 +139,9 @@ static intnat do_compare_val(struct compare_stack* stk,
int res; int res;
int (*compare)(value v1, value v2) = Custom_ops_val(v2)->compare_ext; int (*compare)(value v1, value v2) = Custom_ops_val(v2)->compare_ext;
if (compare == NULL) break; /* for backward compatibility */ if (compare == NULL) break; /* for backward compatibility */
caml_compare_unordered = 0; Caml_state->compare_unordered = 0;
res = compare(v1, v2); res = compare(v1, v2);
if (caml_compare_unordered && !total) return UNORDERED; if (Caml_state->compare_unordered && !total) return UNORDERED;
if (res != 0) return res; if (res != 0) return res;
goto next_item; goto next_item;
} }
@ -163,9 +162,9 @@ static intnat do_compare_val(struct compare_stack* stk,
int res; int res;
int (*compare)(value v1, value v2) = Custom_ops_val(v1)->compare_ext; int (*compare)(value v1, value v2) = Custom_ops_val(v1)->compare_ext;
if (compare == NULL) break; /* for backward compatibility */ if (compare == NULL) break; /* for backward compatibility */
caml_compare_unordered = 0; Caml_state->compare_unordered = 0;
res = compare(v1, v2); res = compare(v1, v2);
if (caml_compare_unordered && !total) return UNORDERED; if (Caml_state->compare_unordered && !total) return UNORDERED;
if (res != 0) return res; if (res != 0) return res;
goto next_item; goto next_item;
} }
@ -261,9 +260,9 @@ static intnat do_compare_val(struct compare_stack* stk,
compare_free_stack(stk); compare_free_stack(stk);
caml_invalid_argument("compare: abstract value"); caml_invalid_argument("compare: abstract value");
} }
caml_compare_unordered = 0; Caml_state->compare_unordered = 0;
res = compare(v1, v2); res = compare(v1, v2);
if (caml_compare_unordered && !total) return UNORDERED; if (Caml_state->compare_unordered && !total) return UNORDERED;
if (res != 0) return res; if (res != 0) return res;
break; break;
} }
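
A minimal sketch of the protocol these hunks implement for custom blocks, assuming the CAML_INTERNALS headers are available: the runtime clears Caml_state->compare_unordered before invoking a user comparison function, and the function sets the flag to report an unordered result. The block type and identifier below are invented for illustration.

/* Sketch: a custom block wrapping a double, whose comparison reports NaN
   as unordered through the per-domain flag instead of the old
   caml_compare_unordered global. */
#define CAML_INTERNALS
#include <math.h>
#include <caml/mlvalues.h>
#include <caml/custom.h>
#include <caml/domain_state.h>

#define Dbl_val(v) (*((double *) Data_custom_val(v)))

static int boxed_double_compare(value v1, value v2)
{
  double d1 = Dbl_val(v1), d2 = Dbl_val(v2);
  if (isnan(d1) || isnan(d2)) {
    Caml_state->compare_unordered = 1;  /* checked by do_compare_val after
                                           the call; result ignored by the
                                           non-total comparisons (=, <) */
    return 1;
  }
  return (d1 > d2) - (d1 < d2);
}

static struct custom_operations boxed_double_ops = {
  "example.boxed_double",               /* identifier is illustrative */
  custom_finalize_default,
  boxed_double_compare,
  custom_hash_default,
  custom_serialize_default,
  custom_deserialize_default,
  custom_compare_ext_default,
  custom_fixed_length_default
};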
@ -54,14 +54,15 @@ static value alloc_custom_gen (struct custom_operations * ops,
} }
/* The remaining [mem_minor] will be counted if the block survives a /* The remaining [mem_minor] will be counted if the block survives a
minor GC */ minor GC */
add_to_custom_table (&caml_custom_table, result, mem_minor, max_major); add_to_custom_table (Caml_state->custom_table, result,
mem_minor, max_major);
/* Keep track of extra resources held by custom block in /* Keep track of extra resources held by custom block in
minor heap. */ minor heap. */
if (mem_minor != 0) { if (mem_minor != 0) {
if (max_minor == 0) max_minor = 1; if (max_minor == 0) max_minor = 1;
caml_extra_heap_resources_minor += Caml_state->extra_heap_resources_minor +=
(double) mem_minor / (double) max_minor; (double) mem_minor / (double) max_minor;
if (caml_extra_heap_resources_minor > 1.0) { if (Caml_state->extra_heap_resources_minor > 1.0) {
caml_request_minor_gc (); caml_request_minor_gc ();
caml_gc_dispatch (); caml_gc_dispatch ();
} }
@ -91,10 +92,10 @@ CAMLexport value caml_alloc_custom_mem(struct custom_operations * ops,
mlsize_t mem_minor = mlsize_t mem_minor =
mem < caml_custom_minor_max_bsz ? mem : caml_custom_minor_max_bsz; mem < caml_custom_minor_max_bsz ? mem : caml_custom_minor_max_bsz;
return alloc_custom_gen (ops, bsz, mem, return alloc_custom_gen (ops, bsz, mem,
Bsize_wsize (caml_stat_heap_wsz) / 150 Bsize_wsize (Caml_state->stat_heap_wsz) / 150
* caml_custom_major_ratio, * caml_custom_major_ratio,
mem_minor, mem_minor,
Bsize_wsize (caml_minor_heap_wsz) / 100 Bsize_wsize (Caml_state->minor_heap_wsz) / 100
* caml_custom_major_ratio); * caml_custom_major_ratio);
} }
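
A hedged usage sketch for caml_alloc_custom_mem, whose mem argument feeds the minor/major accounting shown above (custom_table plus extra_heap_resources_minor). The primitive name, block type, and sizes are illustrative only.

/* Sketch: a custom block owning out-of-heap memory, so GC pacing sees it. */
#include <stdlib.h>
#include <caml/mlvalues.h>
#include <caml/custom.h>
#include <caml/memory.h>
#include <caml/fail.h>

static void buffer_finalize(value v)
{
  free(*((char **) Data_custom_val(v)));
}

static struct custom_operations buffer_ops = {
  "example.buffer", buffer_finalize,
  custom_compare_default, custom_hash_default,
  custom_serialize_default, custom_deserialize_default,
  custom_compare_ext_default, custom_fixed_length_default
};

CAMLprim value example_alloc_buffer(value vlen)   /* hypothetical primitive */
{
  CAMLparam1(vlen);
  CAMLlocal1(res);
  size_t len = Long_val(vlen);
  char *buf;
  /* One word of payload in the heap, [len] bytes of external memory:
     [len] is the amount charged against the budgets in the diff above. */
  res = caml_alloc_custom_mem(&buffer_ops, sizeof(char *), len);
  *((char **) Data_custom_val(res)) = NULL;       /* safe for the finalizer */
  buf = malloc(len == 0 ? 1 : len);
  if (buf == NULL) caml_raise_out_of_memory();
  *((char **) Data_custom_val(res)) = buf;
  CAMLreturn(res);
}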
@ -238,7 +238,7 @@ void caml_debugger_init(void)
} }
open_connection(); open_connection();
caml_debugger_in_use = 1; caml_debugger_in_use = 1;
caml_trap_barrier = caml_stack_high; Caml_state->trap_barrier = Caml_state->stack_high;
} }
static value getval(struct channel *chan) static value getval(struct channel *chan)
@ -259,15 +259,15 @@ static void safe_output_value(struct channel *chan, value val)
struct longjmp_buffer raise_buf, * saved_external_raise; struct longjmp_buffer raise_buf, * saved_external_raise;
/* Catch exceptions raised by [caml_output_val] */ /* Catch exceptions raised by [caml_output_val] */
saved_external_raise = caml_external_raise; saved_external_raise = Caml_state->external_raise;
if (sigsetjmp(raise_buf.buf, 0) == 0) { if (sigsetjmp(raise_buf.buf, 0) == 0) {
caml_external_raise = &raise_buf; Caml_state->external_raise = &raise_buf;
caml_output_val(chan, val, marshal_flags); caml_output_val(chan, val, marshal_flags);
} else { } else {
/* Send wrong magic number, will cause [caml_input_value] to fail */ /* Send wrong magic number, will cause [caml_input_value] to fail */
caml_really_putblock(chan, "\000\000\000\000", 4); caml_really_putblock(chan, "\000\000\000\000", 4);
} }
caml_external_raise = saved_external_raise; Caml_state->external_raise = saved_external_raise;
} }
struct breakpoint { struct breakpoint {
@ -381,7 +381,7 @@ void caml_debugger(enum event_kind event, value param)
if (dbg_socket == -1) return; /* Not connected to a debugger. */ if (dbg_socket == -1) return; /* Not connected to a debugger. */
/* Reset current frame */ /* Reset current frame */
frame = caml_extern_sp + 1; frame = Caml_state->extern_sp + 1;
/* Report the event to the debugger */ /* Report the event to the debugger */
switch(event) { switch(event) {
@ -423,7 +423,7 @@ void caml_debugger(enum event_kind event, value param)
} }
caml_putword(dbg_out, caml_event_count); caml_putword(dbg_out, caml_event_count);
if (event == EVENT_COUNT || event == BREAKPOINT) { if (event == EVENT_COUNT || event == BREAKPOINT) {
caml_putword(dbg_out, caml_stack_high - frame); caml_putword(dbg_out, Caml_state->stack_high - frame);
found = caml_find_code_fragment((char*) Pc(frame), &frag, &cf); found = caml_find_code_fragment((char*) Pc(frame), &frag, &cf);
CAMLassert(found); CAMLassert(found);
caml_putword(dbg_out, frag); caml_putword(dbg_out, frag);
@ -484,11 +484,11 @@ void caml_debugger(enum event_kind event, value param)
#endif #endif
break; break;
case REQ_INITIAL_FRAME: case REQ_INITIAL_FRAME:
frame = caml_extern_sp + 1; frame = Caml_state->extern_sp + 1;
/* Fall through */ /* Fall through */
case REQ_GET_FRAME: case REQ_GET_FRAME:
caml_putword(dbg_out, caml_stack_high - frame); caml_putword(dbg_out, Caml_state->stack_high - frame);
if (frame < caml_stack_high && if (frame < Caml_state->stack_high &&
caml_find_code_fragment((char*) Pc(frame), &frag, &cf)) { caml_find_code_fragment((char*) Pc(frame), &frag, &cf)) {
caml_putword(dbg_out, frag); caml_putword(dbg_out, frag);
caml_putword(dbg_out, (char*) Pc(frame) - cf->code_start); caml_putword(dbg_out, (char*) Pc(frame) - cf->code_start);
@ -500,17 +500,17 @@ void caml_debugger(enum event_kind event, value param)
break; break;
case REQ_SET_FRAME: case REQ_SET_FRAME:
i = caml_getword(dbg_in); i = caml_getword(dbg_in);
frame = caml_stack_high - i; frame = Caml_state->stack_high - i;
break; break;
case REQ_UP_FRAME: case REQ_UP_FRAME:
i = caml_getword(dbg_in); i = caml_getword(dbg_in);
newframe = frame + Extra_args(frame) + i + 3; newframe = frame + Extra_args(frame) + i + 3;
if (newframe >= caml_stack_high || if (newframe >= Caml_state->stack_high ||
!caml_find_code_fragment((char*) Pc(newframe), &frag, &cf)) { !caml_find_code_fragment((char*) Pc(newframe), &frag, &cf)) {
caml_putword(dbg_out, -1); caml_putword(dbg_out, -1);
} else { } else {
frame = newframe; frame = newframe;
caml_putword(dbg_out, caml_stack_high - frame); caml_putword(dbg_out, Caml_state->stack_high - frame);
caml_putword(dbg_out, frag); caml_putword(dbg_out, frag);
caml_putword(dbg_out, (char*) Pc(frame) - cf->code_start); caml_putword(dbg_out, (char*) Pc(frame) - cf->code_start);
} }
@ -518,7 +518,7 @@ void caml_debugger(enum event_kind event, value param)
break; break;
case REQ_SET_TRAP_BARRIER: case REQ_SET_TRAP_BARRIER:
i = caml_getword(dbg_in); i = caml_getword(dbg_in);
caml_trap_barrier = caml_stack_high - i; Caml_state->trap_barrier = Caml_state->stack_high - i;
break; break;
case REQ_GET_LOCAL: case REQ_GET_LOCAL:
i = caml_getword(dbg_in); i = caml_getword(dbg_in);
@ -536,7 +536,7 @@ void caml_debugger(enum event_kind event, value param)
caml_flush(dbg_out); caml_flush(dbg_out);
break; break;
case REQ_GET_ACCU: case REQ_GET_ACCU:
putval(dbg_out, *caml_extern_sp); putval(dbg_out, *Caml_state->extern_sp);
caml_flush(dbg_out); caml_flush(dbg_out);
break; break;
case REQ_GET_HEADER: case REQ_GET_HEADER:
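
An illustrative sketch (assuming CAML_INTERNALS) of the convention used throughout this file: the debugger protocol identifies stack frames by their distance from Caml_state->stack_high rather than by raw pointers, so the identifiers stay valid even if the bytecode stack is reallocated and moves. The helper names are hypothetical.

#define CAML_INTERNALS
#include <caml/mlvalues.h>
#include <caml/domain_state.h>

static intnat frame_to_wire(value *frame)     /* what caml_putword sends */
{
  return Caml_state->stack_high - frame;
}

static value *frame_from_wire(intnat offset)  /* REQ_SET_FRAME direction */
{
  return Caml_state->stack_high - offset;
}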
runtime/domain.c (new file)
@ -0,0 +1,83 @@
/**************************************************************************/
/* */
/* OCaml */
/* */
/* KC Sivaramakrishnan, Indian Institute of Technology, Madras */
/* Stephen Dolan, University of Cambridge */
/* */
/* Copyright 2019 Indian Institute of Technology, Madras */
/* Copyright 2019 University of Cambridge */
/* */
/* All rights reserved. This file is distributed under the terms of */
/* the GNU Lesser General Public License version 2.1, with the */
/* special exception on linking described in the file LICENSE. */
/* */
/**************************************************************************/
#define CAML_INTERNALS
#include "caml/domain_state.h"
#include "caml/memory.h"
CAMLexport caml_domain_state* Caml_state;
void caml_init_domain ()
{
if (Caml_state != NULL)
return;
Caml_state =
(caml_domain_state*)caml_stat_alloc_noexc(sizeof(caml_domain_state));
if (Caml_state == NULL)
caml_fatal_error ("cannot initialize domain state");
Caml_state->young_limit = NULL;
Caml_state->exception_pointer = NULL;
Caml_state->young_ptr = NULL;
Caml_state->young_base = NULL;
Caml_state->young_start = NULL;
Caml_state->young_end = NULL;
Caml_state->young_alloc_start = NULL;
Caml_state->young_alloc_mid = NULL;
Caml_state->young_alloc_end = NULL;
Caml_state->young_trigger = NULL;
Caml_state->minor_heap_wsz = 0;
Caml_state->in_minor_collection = 0;
Caml_state->extra_heap_resources_minor = 0;
caml_alloc_minor_tables();
Caml_state->stack_low = NULL;
Caml_state->stack_high = NULL;
Caml_state->stack_threshold = NULL;
Caml_state->extern_sp = NULL;
Caml_state->trapsp = NULL;
Caml_state->trap_barrier = NULL;
Caml_state->external_raise = NULL;
Caml_state->exn_bucket = Val_unit;
Caml_state->top_of_stack = NULL;
Caml_state->bottom_of_stack = NULL; /* no stack initially */
Caml_state->last_return_address = 1; /* not in OCaml code initially */
Caml_state->gc_regs = NULL;
Caml_state->stat_minor_words = 0.0;
Caml_state->stat_promoted_words = 0.0;
Caml_state->stat_major_words = 0.0;
Caml_state->stat_minor_collections = 0;
Caml_state->stat_major_collections = 0;
Caml_state->stat_heap_wsz = 0;
Caml_state->stat_top_heap_wsz = 0;
Caml_state->stat_compactions = 0;
Caml_state->stat_heap_chunks = 0;
Caml_state->backtrace_active = 0;
Caml_state->backtrace_pos = 0;
Caml_state->backtrace_buffer = NULL;
Caml_state->backtrace_last_exn = Val_unit;
Caml_state->compare_unordered = 0;
Caml_state->local_roots = NULL;
Caml_state->requested_major_slice = 0;
Caml_state->requested_minor_gc = 0;
}
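
How the rest of the patch consumes this structure: the fields of caml_domain_state come from a single table, caml/domain_state.tbl, which is also read below by the awk scripts and by the .equ block in i386.S, and is presumably expanded into the struct itself with an X-macro in caml/domain_state.h. A standalone sketch of that pattern, using a tiny made-up subset of fields rather than the real table:

/* Sketch of the X-macro pattern: one table drives both the struct layout
   and the field-offset constants that assembly code needs. */
#include <stddef.h>
#include <stdio.h>

/* A miniature stand-in for caml/domain_state.tbl */
#define DOMAIN_STATE_TABLE \
  DOMAIN_STATE(void *, young_ptr)   \
  DOMAIN_STATE(void *, young_limit) \
  DOMAIN_STATE(long,   stat_minor_collections)

/* Expansion 1: the struct definition */
typedef struct {
#define DOMAIN_STATE(type, name) type name;
  DOMAIN_STATE_TABLE
#undef DOMAIN_STATE
} mini_domain_state;

int main(void)
{
  /* Expansion 2: byte offsets, the analogue of what the generated
     domain_state*.inc macros and the .set/.equ block in i386.S provide
     to the assembler. */
#define DOMAIN_STATE(type, name) \
  printf("%-25s offset %zu\n", #name, offsetof(mini_domain_state, name));
  DOMAIN_STATE_TABLE
#undef DOMAIN_STATE
  return 0;
}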
@ -30,15 +30,12 @@
#include "caml/signals.h" #include "caml/signals.h"
#include "caml/stacks.h" #include "caml/stacks.h"
CAMLexport struct longjmp_buffer * caml_external_raise = NULL;
value caml_exn_bucket;
CAMLexport void caml_raise(value v) CAMLexport void caml_raise(value v)
{ {
Unlock_exn(); Unlock_exn();
caml_exn_bucket = v; Caml_state->exn_bucket = v;
if (caml_external_raise == NULL) caml_fatal_uncaught_exception(v); if (Caml_state->external_raise == NULL) caml_fatal_uncaught_exception(v);
siglongjmp(caml_external_raise->buf, 1); siglongjmp(Caml_state->external_raise->buf, 1);
} }
CAMLexport void caml_raise_constant(value tag) CAMLexport void caml_raise_constant(value tag)
@ -20,6 +20,7 @@
#include <stdio.h> #include <stdio.h>
#include <signal.h> #include <signal.h>
#include "caml/alloc.h" #include "caml/alloc.h"
#include "caml/domain.h"
#include "caml/fail.h" #include "caml/fail.h"
#include "caml/io.h" #include "caml/io.h"
#include "caml/gc.h" #include "caml/gc.h"
@ -52,22 +53,20 @@ extern caml_generated_constant
/* Exception raising */ /* Exception raising */
CAMLnoreturn_start CAMLnoreturn_start
extern void caml_raise_exception (value bucket) extern void caml_raise_exception (caml_domain_state* state, value bucket)
CAMLnoreturn_end; CAMLnoreturn_end;
char * caml_exception_pointer = NULL;
void caml_raise(value v) void caml_raise(value v)
{ {
Unlock_exn(); Unlock_exn();
if (caml_exception_pointer == NULL) caml_fatal_uncaught_exception(v); if (Caml_state->exception_pointer == NULL) caml_fatal_uncaught_exception(v);
while (caml_local_roots != NULL && while (Caml_state->local_roots != NULL &&
(char *) caml_local_roots < caml_exception_pointer) { (char *) Caml_state->local_roots < Caml_state->exception_pointer) {
caml_local_roots = caml_local_roots->next; Caml_state->local_roots = Caml_state->local_roots->next;
} }
caml_raise_exception(v); caml_raise_exception(Caml_state, v);
} }
void caml_raise_constant(value tag) void caml_raise_constant(value tag)
@ -41,17 +41,6 @@
extern uintnat caml_max_stack_size; /* defined in stacks.c */ extern uintnat caml_max_stack_size; /* defined in stacks.c */
#endif #endif
double caml_stat_minor_words = 0.0,
caml_stat_promoted_words = 0.0,
caml_stat_major_words = 0.0;
intnat caml_stat_minor_collections = 0,
caml_stat_major_collections = 0,
caml_stat_heap_wsz = 0,
caml_stat_top_heap_wsz = 0,
caml_stat_compactions = 0,
caml_stat_heap_chunks = 0;
extern uintnat caml_major_heap_increment; /* percent or words; see major_gc.c */ extern uintnat caml_major_heap_increment; /* percent or words; see major_gc.c */
extern uintnat caml_percent_free; /* see major_gc.c */ extern uintnat caml_percent_free; /* see major_gc.c */
extern uintnat caml_percent_max; /* see compact.c */ extern uintnat caml_percent_max; /* see compact.c */
@ -225,22 +214,24 @@ static value heap_stats (int returnstats)
caml_final_invariant_check(); caml_final_invariant_check();
#endif #endif
CAMLassert (heap_chunks == caml_stat_heap_chunks); CAMLassert (heap_chunks == Caml_state->stat_heap_chunks);
CAMLassert (live_words + free_words + fragments == caml_stat_heap_wsz); CAMLassert (live_words + free_words + fragments == Caml_state->stat_heap_wsz);
if (returnstats){ if (returnstats){
CAMLlocal1 (res); CAMLlocal1 (res);
/* get a copy of these before allocating anything... */ /* get a copy of these before allocating anything... */
double minwords = caml_stat_minor_words double minwords =
+ (double) (caml_young_alloc_end - caml_young_ptr); Caml_state->stat_minor_words
double prowords = caml_stat_promoted_words; + (double) (Caml_state->young_alloc_end - Caml_state->young_ptr);
double majwords = caml_stat_major_words + (double) caml_allocated_words; double prowords = Caml_state->stat_promoted_words;
intnat mincoll = caml_stat_minor_collections; double majwords =
intnat majcoll = caml_stat_major_collections; Caml_state->stat_major_words + (double) caml_allocated_words;
intnat heap_words = caml_stat_heap_wsz; intnat mincoll = Caml_state->stat_minor_collections;
intnat cpct = caml_stat_compactions; intnat majcoll = Caml_state->stat_major_collections;
intnat top_heap_words = caml_stat_top_heap_wsz; intnat heap_words = Caml_state->stat_heap_wsz;
intnat cpct = Caml_state->stat_compactions;
intnat top_heap_words = Caml_state->stat_top_heap_wsz;
res = caml_alloc_tuple (16); res = caml_alloc_tuple (16);
Store_field (res, 0, caml_copy_double (minwords)); Store_field (res, 0, caml_copy_double (minwords));
@ -288,16 +279,18 @@ CAMLprim value caml_gc_quick_stat(value v)
CAMLlocal1 (res); CAMLlocal1 (res);
/* get a copy of these before allocating anything... */ /* get a copy of these before allocating anything... */
double minwords = caml_stat_minor_words double minwords =
+ (double) (caml_young_alloc_end - caml_young_ptr); Caml_state->stat_minor_words
double prowords = caml_stat_promoted_words; + (double) (Caml_state->young_alloc_end - Caml_state->young_ptr);
double majwords = caml_stat_major_words + (double) caml_allocated_words; double prowords = Caml_state->stat_promoted_words;
intnat mincoll = caml_stat_minor_collections; double majwords =
intnat majcoll = caml_stat_major_collections; Caml_state->stat_major_words + (double) caml_allocated_words;
intnat heap_words = caml_stat_heap_wsz; intnat mincoll = Caml_state->stat_minor_collections;
intnat top_heap_words = caml_stat_top_heap_wsz; intnat majcoll = Caml_state->stat_major_collections;
intnat cpct = caml_stat_compactions; intnat heap_words = Caml_state->stat_heap_wsz;
intnat heap_chunks = caml_stat_heap_chunks; intnat top_heap_words = Caml_state->stat_top_heap_wsz;
intnat cpct = Caml_state->stat_compactions;
intnat heap_chunks = Caml_state->stat_heap_chunks;
res = caml_alloc_tuple (16); res = caml_alloc_tuple (16);
Store_field (res, 0, caml_copy_double (minwords)); Store_field (res, 0, caml_copy_double (minwords));
@ -321,8 +314,8 @@ CAMLprim value caml_gc_quick_stat(value v)
double caml_gc_minor_words_unboxed() double caml_gc_minor_words_unboxed()
{ {
return (caml_stat_minor_words return (Caml_state->stat_minor_words
+ (double) (caml_young_alloc_end - caml_young_ptr)); + (double) (Caml_state->young_alloc_end - Caml_state->young_ptr));
} }
CAMLprim value caml_gc_minor_words(value v) CAMLprim value caml_gc_minor_words(value v)
@ -337,10 +330,12 @@ CAMLprim value caml_gc_counters(value v)
CAMLlocal1 (res); CAMLlocal1 (res);
/* get a copy of these before allocating anything... */ /* get a copy of these before allocating anything... */
double minwords = caml_stat_minor_words double minwords =
+ (double) (caml_young_alloc_end - caml_young_ptr); Caml_state->stat_minor_words
double prowords = caml_stat_promoted_words; + (double) (Caml_state->young_alloc_end - Caml_state->young_ptr);
double majwords = caml_stat_major_words + (double) caml_allocated_words; double prowords = Caml_state->stat_promoted_words;
double majwords =
Caml_state->stat_major_words + (double) caml_allocated_words;
res = caml_alloc_tuple (3); res = caml_alloc_tuple (3);
Store_field (res, 0, caml_copy_double (minwords)); Store_field (res, 0, caml_copy_double (minwords));
@ -360,7 +355,7 @@ CAMLprim value caml_gc_get(value v)
CAMLlocal1 (res); CAMLlocal1 (res);
res = caml_alloc_tuple (11); res = caml_alloc_tuple (11);
Store_field (res, 0, Val_long (caml_minor_heap_wsz)); /* s */ Store_field (res, 0, Val_long (Caml_state->minor_heap_wsz)); /* s */
Store_field (res, 1, Val_long (caml_major_heap_increment)); /* i */ Store_field (res, 1, Val_long (caml_major_heap_increment)); /* i */
Store_field (res, 2, Val_long (caml_percent_free)); /* o */ Store_field (res, 2, Val_long (caml_percent_free)); /* o */
Store_field (res, 3, Val_long (caml_verb_gc)); /* v */ Store_field (res, 3, Val_long (caml_verb_gc)); /* v */
@ -501,7 +496,7 @@ CAMLprim value caml_gc_set(value v)
/* Minor heap size comes last because it will trigger a minor collection /* Minor heap size comes last because it will trigger a minor collection
(thus invalidating [v]) and it can raise [Out_of_memory]. */ (thus invalidating [v]) and it can raise [Out_of_memory]. */
newminwsz = norm_minsize (Long_val (Field (v, 0))); newminwsz = norm_minsize (Long_val (Field (v, 0)));
if (newminwsz != caml_minor_heap_wsz){ if (newminwsz != Caml_state->minor_heap_wsz){
caml_gc_message (0x20, "New minor heap size: %" caml_gc_message (0x20, "New minor heap size: %"
ARCH_SIZET_PRINTF_FORMAT "uk words\n", newminwsz / 1024); ARCH_SIZET_PRINTF_FORMAT "uk words\n", newminwsz / 1024);
caml_set_minor_heap_size (Bsize_wsize (newminwsz)); caml_set_minor_heap_size (Bsize_wsize (newminwsz));
@ -525,7 +520,7 @@ static void test_and_compact (void)
{ {
double fp; double fp;
fp = 100.0 * caml_fl_cur_wsz / (caml_stat_heap_wsz - caml_fl_cur_wsz); fp = 100.0 * caml_fl_cur_wsz / (Caml_state->stat_heap_wsz - caml_fl_cur_wsz);
if (fp > 999999.0) fp = 999999.0; if (fp > 999999.0) fp = 999999.0;
caml_gc_message (0x200, "Estimated overhead (lower bound) = %" caml_gc_message (0x200, "Estimated overhead (lower bound) = %"
ARCH_INTNAT_PRINTF_FORMAT "u%%\n", ARCH_INTNAT_PRINTF_FORMAT "u%%\n",
@ -592,7 +587,7 @@ CAMLprim value caml_gc_compaction(value v)
CAMLprim value caml_get_minor_free (value v) CAMLprim value caml_get_minor_free (value v)
{ {
return Val_int (caml_young_ptr - caml_young_alloc_start); return Val_int (Caml_state->young_ptr - Caml_state->young_alloc_start);
} }
CAMLprim value caml_get_major_bucket (value v) CAMLprim value caml_get_major_bucket (value v)
@ -652,7 +647,7 @@ void caml_init_gc (uintnat minor_size, uintnat major_size,
caml_custom_minor_max_bsz = custom_bsz; caml_custom_minor_max_bsz = custom_bsz;
caml_gc_message (0x20, "Initial minor heap size: %" caml_gc_message (0x20, "Initial minor heap size: %"
ARCH_SIZET_PRINTF_FORMAT "uk words\n", ARCH_SIZET_PRINTF_FORMAT "uk words\n",
caml_minor_heap_wsz / 1024); Caml_state->minor_heap_wsz / 1024);
caml_gc_message (0x20, "Initial major heap size: %" caml_gc_message (0x20, "Initial major heap size: %"
ARCH_INTNAT_PRINTF_FORMAT "uk bytes\n", ARCH_INTNAT_PRINTF_FORMAT "uk bytes\n",
major_heap_size / 1024); major_heap_size / 1024);
@ -702,7 +697,7 @@ CAMLprim value caml_runtime_parameters (value unit)
("a=%d,b=%d,H=%"F_Z"u,i=%"F_Z"u,l=%"F_Z"u,o=%"F_Z"u,O=%"F_Z"u,p=%d," ("a=%d,b=%d,H=%"F_Z"u,i=%"F_Z"u,l=%"F_Z"u,o=%"F_Z"u,O=%"F_Z"u,p=%d,"
"s=%"F_S"u,t=%"F_Z"u,v=%"F_Z"u,w=%d,W=%"F_Z"u", "s=%"F_S"u,t=%"F_Z"u,v=%"F_Z"u,w=%d,W=%"F_Z"u",
/* a */ (int) caml_allocation_policy, /* a */ (int) caml_allocation_policy,
/* b */ caml_backtrace_active, /* b */ (int) Caml_state->backtrace_active,
/* h */ /* missing */ /* FIXME add when changed to min_heap_size */ /* h */ /* missing */ /* FIXME add when changed to min_heap_size */
/* H */ caml_use_huge_pages, /* H */ caml_use_huge_pages,
/* i */ caml_major_heap_increment, /* i */ caml_major_heap_increment,
@ -715,7 +710,7 @@ CAMLprim value caml_runtime_parameters (value unit)
/* O */ caml_percent_max, /* O */ caml_percent_max,
/* p */ caml_parser_trace, /* p */ caml_parser_trace,
/* R */ /* missing */ /* R */ /* missing */
/* s */ caml_minor_heap_wsz, /* s */ Caml_state->minor_heap_wsz,
/* t */ caml_trace_level, /* t */ caml_trace_level,
/* v */ caml_verb_gc, /* v */ caml_verb_gc,
/* w */ caml_major_window, /* w */ caml_major_window,
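
A minimal sketch (assuming CAML_INTERNALS) of the minor-words figure computed repeatedly above: it is the total already accounted by past minor collections plus whatever has been allocated in the current minor heap. Because the minor heap is allocated downwards, young_alloc_end - young_ptr measures the live allocation.

#define CAML_INTERNALS
#include <caml/mlvalues.h>
#include <caml/domain_state.h>

static double minor_words_so_far(void)
{
  return Caml_state->stat_minor_words
         + (double) (Caml_state->young_alloc_end - Caml_state->young_ptr);
}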
@ -0,0 +1,36 @@
#**************************************************************************
#* *
#* OCaml *
#* *
#* KC Sivaramakrishnan, Indian Institute of Technology, Madras *
#* *
#* Copyright 2019 Indian Institute of Technology, Madras *
#* *
#* All rights reserved. This file is distributed under the terms of *
#* the GNU Lesser General Public License version 2.1, with the *
#* special exception on linking described in the file LICENSE. *
#* *
#**************************************************************************
BEGIN{FS="[,)] *";count=0};
/DOMAIN_STATE/{
print "Store_" $2 " MACRO reg1, reg2";
print " mov [reg1+" count "], reg2";
print "ENDM";
print "Load_" $2 " MACRO reg1, reg2";
print " mov reg2, [reg1+" count "]";
print "ENDM";
print "Push_" $2 " MACRO reg1";
print " push [reg1+" count "]";
print "ENDM";
print "Pop_" $2 " MACRO reg1";
print " pop [reg1+" count "]";
print "ENDM";
print "Cmp_" $2 " MACRO reg1, reg2";
print " cmp reg2, [reg1+" count "]";
print "ENDM";
print "Sub_" $2 " MACRO reg1, reg2";
print " sub reg2, [reg1+" count "]";
print "ENDM";
count+=8
}
@ -0,0 +1,33 @@
#**************************************************************************
#* *
#* OCaml *
#* *
#* KC Sivaramakrishnan, Indian Institute of Technology, Madras *
#* *
#* Copyright 2019 Indian Institute of Technology, Madras *
#* *
#* All rights reserved. This file is distributed under the terms of *
#* the GNU Lesser General Public License version 2.1, with the *
#* special exception on linking described in the file LICENSE. *
#* *
#**************************************************************************
BEGIN{FS="[,)] *";count=0};
/DOMAIN_STATE/{
print "Store_" $2 " MACRO reg";
print " mov [r14+" count "], reg";
print "ENDM";
print "Load_" $2 " MACRO reg";
print " mov reg, [r14+" count "]";
print "ENDM";
print "Push_" $2 " MACRO";
print " push [r14+" count "]";
print "ENDM";
print "Pop_" $2 " MACRO";
print " pop [r14+" count "]";
print "ENDM";
print "Cmp_" $2 " MACRO reg";
print " cmp reg, [r14+" count "]";
print "ENDM";
count+=8
}
@ -82,6 +82,15 @@
#define STACK_PROBE_SIZE 16384 #define STACK_PROBE_SIZE 16384
#endif #endif
.set domain_curr_field, 0
#define DOMAIN_STATE(c_type, name) \
.equ domain_field_caml_##name, domain_curr_field ; \
.set domain_curr_field, domain_curr_field + 1
#include "../runtime/caml/domain_state.tbl"
#undef DOMAIN_STATE
#define CAML_STATE(var,reg) 8*domain_field_caml_##var(reg)
/* PR#6038: GCC and Clang seem to require 16-byte alignment nowadays, /* PR#6038: GCC and Clang seem to require 16-byte alignment nowadays,
even if only MacOS X's ABI formally requires it. */ even if only MacOS X's ABI formally requires it. */
#define ALIGN_STACK(amount) subl $ amount, %esp ; CFI_ADJUST(amount) #define ALIGN_STACK(amount) subl $ amount, %esp ; CFI_ADJUST(amount)
@ -105,10 +114,13 @@ G(caml_system__code_begin):
FUNCTION(caml_call_gc) FUNCTION(caml_call_gc)
CFI_STARTPROC CFI_STARTPROC
/* Record lowest stack address and return address */ /* Record lowest stack address and return address */
movl 0(%esp), %eax pushl %ebx; CFI_ADJUST(4)
movl %eax, G(caml_last_return_address) movl G(Caml_state), %ebx
leal 4(%esp), %eax movl 4(%esp), %eax
movl %eax, G(caml_bottom_of_stack) movl %eax, CAML_STATE(last_return_address, %ebx)
leal 8(%esp), %eax
movl %eax, CAML_STATE(bottom_of_stack, %ebx)
popl %ebx; CFI_ADJUST(-4)
LBL(105): LBL(105):
#if !defined(SYS_mingw) && !defined(SYS_cygwin) #if !defined(SYS_mingw) && !defined(SYS_cygwin)
/* Touch the stack to trigger a recoverable segfault /* Touch the stack to trigger a recoverable segfault
@ -125,7 +137,8 @@ LBL(105):
pushl %ecx; CFI_ADJUST(4) pushl %ecx; CFI_ADJUST(4)
pushl %ebx; CFI_ADJUST(4) pushl %ebx; CFI_ADJUST(4)
pushl %eax; CFI_ADJUST(4) pushl %eax; CFI_ADJUST(4)
movl %esp, G(caml_gc_regs) movl G(Caml_state), %ebx
movl %esp, CAML_STATE(gc_regs, %ebx)
/* MacOSX note: 16-alignment of stack preserved at this point */ /* MacOSX note: 16-alignment of stack preserved at this point */
/* Call the garbage collector */ /* Call the garbage collector */
call G(caml_garbage_collection) call G(caml_garbage_collection)
@ -144,17 +157,21 @@ LBL(105):
FUNCTION(caml_alloc1) FUNCTION(caml_alloc1)
CFI_STARTPROC CFI_STARTPROC
movl G(caml_young_ptr), %eax pushl %ebx; CFI_ADJUST(4)
movl G(Caml_state), %ebx
movl CAML_STATE(young_ptr, %ebx), %eax
subl $8, %eax subl $8, %eax
cmpl G(caml_young_limit), %eax cmpl CAML_STATE(young_limit, %ebx), %eax
jb LBL(100) jb LBL(100)
movl %eax, G(caml_young_ptr) movl %eax, CAML_STATE(young_ptr, %ebx)
popl %ebx; CFI_ADJUST(-4)
ret ret
LBL(100): LBL(100):
movl 0(%esp), %eax movl 4(%esp), %eax
movl %eax, G(caml_last_return_address) movl %eax, CAML_STATE(last_return_address, %ebx)
leal 4(%esp), %eax leal 8(%esp), %eax
movl %eax, G(caml_bottom_of_stack) movl %eax, CAML_STATE(bottom_of_stack, %ebx)
popl %ebx; CFI_ADJUST(-4)
ALIGN_STACK(12) ALIGN_STACK(12)
call LBL(105) call LBL(105)
UNDO_ALIGN_STACK(12) UNDO_ALIGN_STACK(12)
@ -164,17 +181,21 @@ LBL(100):
FUNCTION(caml_alloc2) FUNCTION(caml_alloc2)
CFI_STARTPROC CFI_STARTPROC
movl G(caml_young_ptr), %eax pushl %ebx; CFI_ADJUST(4)
movl G(Caml_state), %ebx
movl CAML_STATE(young_ptr, %ebx), %eax
subl $12, %eax subl $12, %eax
cmpl G(caml_young_limit), %eax cmpl CAML_STATE(young_limit, %ebx), %eax
jb LBL(101) jb LBL(101)
movl %eax, G(caml_young_ptr) movl %eax, CAML_STATE(young_ptr, %ebx)
popl %ebx; CFI_ADJUST(-4)
ret ret
LBL(101): LBL(101):
movl 0(%esp), %eax movl 4(%esp), %eax
movl %eax, G(caml_last_return_address) movl %eax, CAML_STATE(last_return_address, %ebx)
leal 4(%esp), %eax leal 8(%esp), %eax
movl %eax, G(caml_bottom_of_stack) movl %eax, CAML_STATE(bottom_of_stack, %ebx)
popl %ebx; CFI_ADJUST(-4)
ALIGN_STACK(12) ALIGN_STACK(12)
call LBL(105) call LBL(105)
UNDO_ALIGN_STACK(12) UNDO_ALIGN_STACK(12)
@ -184,17 +205,21 @@ LBL(101):
FUNCTION(caml_alloc3) FUNCTION(caml_alloc3)
CFI_STARTPROC CFI_STARTPROC
movl G(caml_young_ptr), %eax pushl %ebx; CFI_ADJUST(4)
movl G(Caml_state), %ebx
movl CAML_STATE(young_ptr, %ebx), %eax
subl $16, %eax subl $16, %eax
cmpl G(caml_young_limit), %eax cmpl CAML_STATE(young_limit, %ebx), %eax
jb LBL(102) jb LBL(102)
movl %eax, G(caml_young_ptr) movl %eax, CAML_STATE(young_ptr, %ebx)
popl %ebx; CFI_ADJUST(-4)
ret ret
LBL(102): LBL(102):
movl 0(%esp), %eax movl 4(%esp), %eax
movl %eax, G(caml_last_return_address) movl %eax, CAML_STATE(last_return_address, %ebx)
leal 4(%esp), %eax leal 8(%esp), %eax
movl %eax, G(caml_bottom_of_stack) movl %eax, CAML_STATE(bottom_of_stack, %ebx)
popl %ebx; CFI_ADJUST(-4)
ALIGN_STACK(12) ALIGN_STACK(12)
call LBL(105) call LBL(105)
UNDO_ALIGN_STACK(12) UNDO_ALIGN_STACK(12)
@ -204,20 +229,24 @@ LBL(102):
FUNCTION(caml_allocN) FUNCTION(caml_allocN)
CFI_STARTPROC CFI_STARTPROC
subl G(caml_young_ptr), %eax /* eax = size - caml_young_ptr */ pushl %eax; CFI_ADJUST(4) /* saved desired size */
pushl %ebx; CFI_ADJUST(4)
movl G(Caml_state), %ebx
/* eax = size - caml_young_ptr */
subl CAML_STATE(young_ptr, %ebx), %eax
negl %eax /* eax = caml_young_ptr - size */ negl %eax /* eax = caml_young_ptr - size */
cmpl G(caml_young_limit), %eax cmpl CAML_STATE(young_limit, %ebx), %eax
jb LBL(103) jb LBL(103)
movl %eax, G(caml_young_ptr) movl %eax, CAML_STATE(young_ptr, %ebx)
popl %ebx; CFI_ADJUST(-4)
addl $4, %esp; CFI_ADJUST(-4) /* drop desired size */
ret ret
LBL(103): LBL(103):
subl G(caml_young_ptr), %eax /* eax = - size */ movl 8(%esp), %eax
negl %eax /* eax = size */ movl %eax, CAML_STATE(last_return_address, %ebx)
pushl %eax; CFI_ADJUST(4) /* save desired size */ leal 12(%esp), %eax
movl 4(%esp), %eax movl %eax, CAML_STATE(bottom_of_stack, %ebx)
movl %eax, G(caml_last_return_address) popl %ebx; CFI_ADJUST(-4)
leal 8(%esp), %eax
movl %eax, G(caml_bottom_of_stack)
ALIGN_STACK(8) ALIGN_STACK(8)
call LBL(105) call LBL(105)
UNDO_ALIGN_STACK(8) UNDO_ALIGN_STACK(8)
@ -231,10 +260,12 @@ LBL(103):
FUNCTION(caml_c_call) FUNCTION(caml_c_call)
CFI_STARTPROC CFI_STARTPROC
/* Record lowest stack address and return address */ /* Record lowest stack address and return address */
/* ecx and edx are destroyed at C call. Use them as temp. */
movl G(Caml_state), %ecx
movl (%esp), %edx movl (%esp), %edx
movl %edx, G(caml_last_return_address) movl %edx, CAML_STATE(last_return_address, %ecx)
leal 4(%esp), %edx leal 4(%esp), %edx
movl %edx, G(caml_bottom_of_stack) movl %edx, CAML_STATE(bottom_of_stack, %ecx)
#if !defined(SYS_mingw) && !defined(SYS_cygwin) #if !defined(SYS_mingw) && !defined(SYS_cygwin)
/* Touch the stack to trigger a recoverable segfault /* Touch the stack to trigger a recoverable segfault
if insufficient space remains */ if insufficient space remains */
@ -260,27 +291,30 @@ FUNCTION(caml_start_program)
movl $ G(caml_program), %esi movl $ G(caml_program), %esi
/* Common code for caml_start_program and caml_callback* */ /* Common code for caml_start_program and caml_callback* */
LBL(106): LBL(106):
movl G(Caml_state), %edi
/* Build a callback link */ /* Build a callback link */
pushl G(caml_gc_regs); CFI_ADJUST(4) pushl CAML_STATE(gc_regs, %edi); CFI_ADJUST(4)
pushl G(caml_last_return_address); CFI_ADJUST(4) pushl CAML_STATE(last_return_address, %edi); CFI_ADJUST(4)
pushl G(caml_bottom_of_stack); CFI_ADJUST(4) pushl CAML_STATE(bottom_of_stack, %edi); CFI_ADJUST(4)
/* Note: 16-alignment preserved on MacOSX at this point */ /* Note: 16-alignment preserved on MacOSX at this point */
/* Build an exception handler */ /* Build an exception handler */
pushl $ LBL(108); CFI_ADJUST(4) pushl $ LBL(108); CFI_ADJUST(4)
ALIGN_STACK(8) ALIGN_STACK(8)
pushl G(caml_exception_pointer); CFI_ADJUST(4) pushl CAML_STATE(exception_pointer, %edi); CFI_ADJUST(4)
movl %esp, G(caml_exception_pointer) movl %esp, CAML_STATE(exception_pointer, %edi)
/* Call the OCaml code */ /* Call the OCaml code */
call *%esi call *%esi
LBL(107): LBL(107):
movl G(Caml_state), %edi
/* Pop the exception handler */ /* Pop the exception handler */
popl G(caml_exception_pointer); CFI_ADJUST(-4) popl CAML_STATE(exception_pointer, %edi); CFI_ADJUST(-4)
addl $12, %esp ; CFI_ADJUST(-12) addl $12, %esp ; CFI_ADJUST(-12)
LBL(109): LBL(109):
movl G(Caml_state), %edi /* Reload for LBL(109) entry */
/* Pop the callback link, restoring the global variables */ /* Pop the callback link, restoring the global variables */
popl G(caml_bottom_of_stack); CFI_ADJUST(-4) popl CAML_STATE(bottom_of_stack, %edi); CFI_ADJUST(-4)
popl G(caml_last_return_address); CFI_ADJUST(-4) popl CAML_STATE(last_return_address, %edi); CFI_ADJUST(-4)
popl G(caml_gc_regs); CFI_ADJUST(-4) popl CAML_STATE(gc_regs, %edi); CFI_ADJUST(-4)
/* Restore callee-save registers. */ /* Restore callee-save registers. */
popl %ebp; CFI_ADJUST(-4) popl %ebp; CFI_ADJUST(-4)
popl %edi; CFI_ADJUST(-4) popl %edi; CFI_ADJUST(-4)
@ -300,15 +334,16 @@ LBL(108):
FUNCTION(caml_raise_exn) FUNCTION(caml_raise_exn)
CFI_STARTPROC CFI_STARTPROC
testl $1, G(caml_backtrace_active) movl G(Caml_state), %ebx
testl $1, CAML_STATE(backtrace_active, %ebx)
jne LBL(110) jne LBL(110)
movl G(caml_exception_pointer), %esp movl CAML_STATE(exception_pointer, %ebx), %esp
popl G(caml_exception_pointer); CFI_ADJUST(-4) popl CAML_STATE(exception_pointer, %ebx); CFI_ADJUST(-4)
UNDO_ALIGN_STACK(8) UNDO_ALIGN_STACK(8)
ret ret
LBL(110): LBL(110):
movl %eax, %esi /* Save exception bucket in esi */ movl %eax, %esi /* Save exception bucket in esi */
movl G(caml_exception_pointer), %edi /* SP of handler */ movl CAML_STATE(exception_pointer, %ebx), %edi /* SP of handler */
movl 0(%esp), %eax /* PC of raise */ movl 0(%esp), %eax /* PC of raise */
leal 4(%esp), %edx /* SP of raise */ leal 4(%esp), %edx /* SP of raise */
ALIGN_STACK(12) ALIGN_STACK(12)
@ -319,7 +354,7 @@ LBL(110):
call G(caml_stash_backtrace) call G(caml_stash_backtrace)
movl %esi, %eax /* Recover exception bucket */ movl %esi, %eax /* Recover exception bucket */
movl %edi, %esp movl %edi, %esp
popl G(caml_exception_pointer); CFI_ADJUST(-4) popl CAML_STATE(exception_pointer, %ebx); CFI_ADJUST(-4)
UNDO_ALIGN_STACK(8) UNDO_ALIGN_STACK(8)
ret ret
CFI_ENDPROC CFI_ENDPROC
@ -329,24 +364,29 @@ LBL(110):
FUNCTION(caml_raise_exception) FUNCTION(caml_raise_exception)
CFI_STARTPROC CFI_STARTPROC
testl $1, G(caml_backtrace_active) movl G(Caml_state), %ebx
testl $1, CAML_STATE(backtrace_active, %ebx)
jne LBL(112) jne LBL(112)
movl 4(%esp), %eax movl 8(%esp), %eax
movl G(caml_exception_pointer), %esp movl CAML_STATE(exception_pointer, %ebx), %esp
popl G(caml_exception_pointer); CFI_ADJUST(-4) popl CAML_STATE(exception_pointer, %ebx); CFI_ADJUST(-4)
UNDO_ALIGN_STACK(8) UNDO_ALIGN_STACK(8)
ret ret
LBL(112): LBL(112):
movl 4(%esp), %esi /* Save exception bucket in esi */ movl 8(%esp), %esi /* Save exception bucket in esi */
ALIGN_STACK(12) ALIGN_STACK(12)
pushl G(caml_exception_pointer); CFI_ADJUST(4) /* 4: sp of handler */ /* 4: sp of handler */
pushl G(caml_bottom_of_stack); CFI_ADJUST(4) /* 3: sp of raise */ pushl CAML_STATE(exception_pointer, %ebx); CFI_ADJUST(4)
pushl G(caml_last_return_address); CFI_ADJUST(4)/* 2: pc of raise */ /* 3: sp of raise */
pushl %esi; CFI_ADJUST(4) /* 1: exception bucket */ pushl CAML_STATE(bottom_of_stack, %ebx); CFI_ADJUST(4)
/* 2: pc of raise */
pushl CAML_STATE(last_return_address, %ebx); CFI_ADJUST(4)
/* 1: exception bucket */
pushl %esi; CFI_ADJUST(4)
call G(caml_stash_backtrace) call G(caml_stash_backtrace)
movl %esi, %eax /* Recover exception bucket */ movl %esi, %eax /* Recover exception bucket */
movl G(caml_exception_pointer), %esp movl CAML_STATE(exception_pointer, %ebx), %esp
popl G(caml_exception_pointer); CFI_ADJUST(-4) popl CAML_STATE(exception_pointer, %ebx); CFI_ADJUST(-4)
UNDO_ALIGN_STACK(8) UNDO_ALIGN_STACK(8)
ret ret
CFI_ENDPROC CFI_ENDPROC
@ -354,7 +394,7 @@ LBL(112):
/* Callback from C to OCaml */ /* Callback from C to OCaml */
FUNCTION(caml_callback_exn) FUNCTION(caml_callback_asm)
CFI_STARTPROC CFI_STARTPROC
/* Save callee-save registers */ /* Save callee-save registers */
pushl %ebx; CFI_ADJUST(4) pushl %ebx; CFI_ADJUST(4)
@ -362,14 +402,15 @@ FUNCTION(caml_callback_exn)
pushl %edi; CFI_ADJUST(4) pushl %edi; CFI_ADJUST(4)
pushl %ebp; CFI_ADJUST(4) pushl %ebp; CFI_ADJUST(4)
/* Initial loading of arguments */ /* Initial loading of arguments */
movl 20(%esp), %ebx /* closure */ movl 24(%esp), %ebx /* arg2: closure */
movl 24(%esp), %eax /* argument */ movl 28(%esp), %edi /* arguments array */
movl 0(%edi), %eax /* arg1: argument */
movl 0(%ebx), %esi /* code pointer */ movl 0(%ebx), %esi /* code pointer */
jmp LBL(106) jmp LBL(106)
CFI_ENDPROC CFI_ENDPROC
ENDFUNCTION(caml_callback_exn) ENDFUNCTION(caml_callback_asm)
FUNCTION(caml_callback2_exn) FUNCTION(caml_callback2_asm)
CFI_STARTPROC CFI_STARTPROC
/* Save callee-save registers */ /* Save callee-save registers */
pushl %ebx; CFI_ADJUST(4) pushl %ebx; CFI_ADJUST(4)
@ -377,15 +418,16 @@ FUNCTION(caml_callback2_exn)
pushl %edi; CFI_ADJUST(4) pushl %edi; CFI_ADJUST(4)
pushl %ebp; CFI_ADJUST(4) pushl %ebp; CFI_ADJUST(4)
/* Initial loading of arguments */ /* Initial loading of arguments */
movl 20(%esp), %ecx /* closure */ movl 24(%esp), %ecx /* arg3: closure */
movl 24(%esp), %eax /* first argument */ movl 28(%esp), %edi /* arguments array */
movl 28(%esp), %ebx /* second argument */ movl 0(%edi), %eax /* arg1: first argument */
movl 4(%edi), %ebx /* arg2: second argument */
movl $ G(caml_apply2), %esi /* code pointer */ movl $ G(caml_apply2), %esi /* code pointer */
jmp LBL(106) jmp LBL(106)
CFI_ENDPROC CFI_ENDPROC
ENDFUNCTION(caml_callback2_exn) ENDFUNCTION(caml_callback2_asm)
FUNCTION(caml_callback3_exn) FUNCTION(caml_callback3_asm)
CFI_STARTPROC CFI_STARTPROC
/* Save callee-save registers */ /* Save callee-save registers */
pushl %ebx; CFI_ADJUST(4) pushl %ebx; CFI_ADJUST(4)
@ -393,14 +435,15 @@ FUNCTION(caml_callback3_exn)
pushl %edi; CFI_ADJUST(4) pushl %edi; CFI_ADJUST(4)
pushl %ebp; CFI_ADJUST(4) pushl %ebp; CFI_ADJUST(4)
/* Initial loading of arguments */ /* Initial loading of arguments */
movl 20(%esp), %edx /* closure */ movl 24(%esp), %edx /* arg4: closure */
movl 24(%esp), %eax /* first argument */ movl 28(%esp), %edi /* arguments array */
movl 28(%esp), %ebx /* second argument */ movl 0(%edi), %eax /* arg1: first argument */
movl 32(%esp), %ecx /* third argument */ movl 4(%edi), %ebx /* arg2: second argument */
movl 8(%edi), %ecx /* arg3: third argument */
movl $ G(caml_apply3), %esi /* code pointer */ movl $ G(caml_apply3), %esi /* code pointer */
jmp LBL(106) jmp LBL(106)
CFI_ENDPROC CFI_ENDPROC
ENDFUNCTION(caml_callback3_exn) ENDFUNCTION(caml_callback3_asm)
FUNCTION(caml_ml_array_bound_error) FUNCTION(caml_ml_array_bound_error)
CFI_STARTPROC CFI_STARTPROC
@ -414,10 +457,11 @@ FUNCTION(caml_ml_array_bound_error)
ffree %st(6) ffree %st(6)
ffree %st(7) ffree %st(7)
/* Record lowest stack address and return address */ /* Record lowest stack address and return address */
movl G(Caml_state), %ebx
movl (%esp), %edx movl (%esp), %edx
movl %edx, G(caml_last_return_address) movl %edx, CAML_STATE(last_return_address, %ebx)
leal 4(%esp), %edx leal 4(%esp), %edx
movl %edx, G(caml_bottom_of_stack) movl %edx, CAML_STATE(bottom_of_stack, %ebx)
/* Re-align the stack */ /* Re-align the stack */
andl $-16, %esp andl $-16, %esp
/* Branch to [caml_array_bound_error] (never returns) */ /* Branch to [caml_array_bound_error] (never returns) */
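
The renamed callback stubs take the closure's arguments through an array instead of individual stack slots; judging from the loads above (closure at 24(%esp), argument array at 28(%esp), after four saved registers and the return address), the first C argument at 20(%esp), unused by this 32-bit stub, is presumably the domain state pointer. A hedged reconstruction of the call from C, not the literal declaration from the runtime:

/* Only the extern prototype below is inferred from the stack offsets in
   the stub; the authoritative declaration lives elsewhere in the runtime. */
#define CAML_INTERNALS
#include <caml/mlvalues.h>
#include <caml/domain_state.h>

extern value caml_callback2_asm(caml_domain_state *state,
                                value closure, value args[]);

value apply2_from_c(value closure, value a, value b)
{
  value args[2] = { a, b };
  return caml_callback2_asm(Caml_state, closure, args);
}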
@ -23,31 +23,29 @@
EXTERN _caml_apply3: PROC EXTERN _caml_apply3: PROC
EXTERN _caml_program: PROC EXTERN _caml_program: PROC
EXTERN _caml_array_bound_error: PROC EXTERN _caml_array_bound_error: PROC
EXTERN _caml_young_limit: DWORD
EXTERN _caml_young_ptr: DWORD
EXTERN _caml_bottom_of_stack: DWORD
EXTERN _caml_last_return_address: DWORD
EXTERN _caml_gc_regs: DWORD
EXTERN _caml_exception_pointer: DWORD
EXTERN _caml_backtrace_pos: DWORD
EXTERN _caml_backtrace_active: DWORD
EXTERN _caml_stash_backtrace: PROC EXTERN _caml_stash_backtrace: PROC
EXTERN _Caml_state: DWORD
; Allocation ; Allocation
.CODE .CODE
PUBLIC _caml_call_gc
PUBLIC _caml_alloc1 PUBLIC _caml_alloc1
PUBLIC _caml_alloc2 PUBLIC _caml_alloc2
PUBLIC _caml_alloc3 PUBLIC _caml_alloc3
PUBLIC _caml_allocN PUBLIC _caml_allocN
PUBLIC _caml_call_gc
INCLUDE domain_state32.inc
_caml_call_gc: _caml_call_gc:
; Record lowest stack address and return address ; Record lowest stack address and return address
mov eax, [esp] push ebx ; make a tmp reg
mov _caml_last_return_address, eax mov ebx, _Caml_state
lea eax, [esp+4] mov eax, [esp+4]
mov _caml_bottom_of_stack, eax Store_last_return_address ebx, eax
lea eax, [esp+8]
Store_bottom_of_stack ebx, eax
pop ebx
; Save all regs used by the code generator ; Save all regs used by the code generator
L105: push ebp L105: push ebp
push edi push edi
@ -56,7 +54,8 @@ L105: push ebp
push ecx push ecx
push ebx push ebx
push eax push eax
mov _caml_gc_regs, esp mov ebx, _Caml_state
Store_gc_regs ebx, esp
; Call the garbage collector ; Call the garbage collector
call _caml_garbage_collection call _caml_garbage_collection
; Restore all regs used by the code generator ; Restore all regs used by the code generator
@ -72,64 +71,80 @@ L105: push ebp
ALIGN 4 ALIGN 4
_caml_alloc1: _caml_alloc1:
mov eax, _caml_young_ptr push ebx ; make a tmp reg
mov ebx, _Caml_state
Load_young_ptr ebx, eax
sub eax, 8 sub eax, 8
cmp eax, _caml_young_limit Cmp_young_limit ebx, eax
jb L100 jb L100
mov _caml_young_ptr, eax Store_young_ptr ebx, eax
pop ebx
ret ret
L100: mov eax, [esp] L100: mov eax, [esp + 4]
mov _caml_last_return_address, eax Store_last_return_address ebx, eax
lea eax, [esp+4] lea eax, [esp+8]
mov _caml_bottom_of_stack, eax Store_bottom_of_stack ebx, eax
pop ebx
call L105 call L105
jmp _caml_alloc1 jmp _caml_alloc1
ALIGN 4 ALIGN 4
_caml_alloc2: _caml_alloc2:
mov eax, _caml_young_ptr push ebx ; make a tmp reg
mov ebx, _Caml_state
Load_young_ptr ebx, eax
sub eax, 12 sub eax, 12
cmp eax, _caml_young_limit Cmp_young_limit ebx, eax
jb L101 jb L101
mov _caml_young_ptr, eax Store_young_ptr ebx, eax
pop ebx
ret ret
L101: mov eax, [esp] L101: mov eax, [esp+4]
mov _caml_last_return_address, eax Store_last_return_address ebx, eax
lea eax, [esp+4] lea eax, [esp+8]
mov _caml_bottom_of_stack, eax Store_bottom_of_stack ebx, eax
pop ebx
call L105 call L105
jmp _caml_alloc2 jmp _caml_alloc2
ALIGN 4 ALIGN 4
_caml_alloc3: _caml_alloc3:
mov eax, _caml_young_ptr push ebx ; make a tmp reg
mov ebx, _Caml_state
Load_young_ptr ebx, eax
sub eax, 16 sub eax, 16
cmp eax, _caml_young_limit Cmp_young_limit ebx, eax
jb L102 jb L102
mov _caml_young_ptr, eax Store_young_ptr ebx, eax
pop ebx
ret ret
L102: mov eax, [esp] L102: mov eax, [esp+4]
mov _caml_last_return_address, eax Store_last_return_address ebx, eax
lea eax, [esp+4] lea eax, [esp+8]
mov _caml_bottom_of_stack, eax Store_bottom_of_stack ebx, eax
pop ebx
call L105 call L105
jmp _caml_alloc3 jmp _caml_alloc3
ALIGN 4 ALIGN 4
_caml_allocN: _caml_allocN:
sub eax, _caml_young_ptr ; eax = size - young_ptr push eax ; Save desired size
neg eax ; eax = young_ptr - size push ebx ; Make a tmp reg
cmp eax, _caml_young_limit mov ebx, _Caml_state
Sub_young_ptr ebx, eax ; eax = size - young_ptr
neg eax ; eax = young_ptr - size
Cmp_young_limit ebx, eax
jb L103 jb L103
mov _caml_young_ptr, eax Store_young_ptr ebx, eax
pop ebx
add esp, 4 ; drop desired size
ret ret
L103: sub eax, _caml_young_ptr ; eax = - size L103: mov eax, [esp+8]
neg eax ; eax = size Store_last_return_address ebx, eax
push eax ; save desired size lea eax, [esp+12]
mov eax, [esp+4] Store_bottom_of_stack ebx, eax
mov _caml_last_return_address, eax pop ebx
lea eax, [esp+8]
mov _caml_bottom_of_stack, eax
call L105 call L105
pop eax ; recover desired size pop eax ; recover desired size
jmp _caml_allocN jmp _caml_allocN
@ -140,10 +155,12 @@ L103: sub eax, _caml_young_ptr ; eax = - size
ALIGN 4 ALIGN 4
_caml_c_call: _caml_c_call:
; Record lowest stack address and return address ; Record lowest stack address and return address
; ecx and edx are destroyed at C call. Use them as temp.
mov ecx, _Caml_state
mov edx, [esp] mov edx, [esp]
mov _caml_last_return_address, edx Store_last_return_address ecx, edx
lea edx, [esp+4] lea edx, [esp+4]
mov _caml_bottom_of_stack, edx Store_bottom_of_stack ecx, edx
; Call the function (address in %eax) ; Call the function (address in %eax)
jmp eax jmp eax
@ -163,26 +180,29 @@ _caml_start_program:
; Code shared between caml_start_program and callback* ; Code shared between caml_start_program and callback*
L106: L106:
mov edi, _Caml_state
; Build a callback link ; Build a callback link
push _caml_gc_regs Push_gc_regs edi
push _caml_last_return_address Push_last_return_address edi
push _caml_bottom_of_stack Push_bottom_of_stack edi
; Build an exception handler ; Build an exception handler
push L108 push L108
push _caml_exception_pointer Push_exception_pointer edi
mov _caml_exception_pointer, esp Store_exception_pointer edi, esp
; Call the OCaml code ; Call the OCaml code
call esi call esi
L107: L107:
mov edi, _Caml_state
; Pop the exception handler ; Pop the exception handler
pop _caml_exception_pointer Pop_exception_pointer edi
pop esi ; dummy register add esp, 4
L109: L109:
mov edi, _Caml_state
; Pop the callback link, restoring the global variables ; Pop the callback link, restoring the global variables
; used by caml_c_call ; used by caml_c_call
pop _caml_bottom_of_stack Pop_bottom_of_stack edi
pop _caml_last_return_address Pop_last_return_address edi
pop _caml_gc_regs Pop_gc_regs edi
; Restore callee-save registers. ; Restore callee-save registers.
pop ebp pop ebp
pop edi pop edi
@ -201,16 +221,18 @@ L108:
PUBLIC _caml_raise_exn PUBLIC _caml_raise_exn
ALIGN 4 ALIGN 4
_caml_raise_exn: _caml_raise_exn:
test _caml_backtrace_active, 1 mov ebx, _Caml_state
Load_backtrace_active ebx, ecx
test ecx, 1
jne L110 jne L110
mov esp, _caml_exception_pointer Load_exception_pointer ebx, esp
pop _caml_exception_pointer Pop_exception_pointer ebx
ret ret
L110: L110:
mov esi, eax ; Save exception bucket in esi mov esi, eax ; Save exception bucket in esi
mov edi, _caml_exception_pointer ; SP of handler Load_exception_pointer ebx, edi ; SP of handler
mov eax, [esp] ; PC of raise mov eax, [esp] ; PC of raise
lea edx, [esp+4] lea edx, [esp+4] ; SP of raise
push edi ; arg 4: SP of handler push edi ; arg 4: SP of handler
push edx ; arg 3: SP of raise push edx ; arg 3: SP of raise
push eax ; arg 2: PC of raise push eax ; arg 2: PC of raise
@ -218,7 +240,7 @@ L110:
call _caml_stash_backtrace call _caml_stash_backtrace
mov eax, esi ; recover exception bucket mov eax, esi ; recover exception bucket
mov esp, edi ; cut the stack mov esp, edi ; cut the stack
pop _caml_exception_pointer Pop_exception_pointer ebx
ret ret
; Raise an exception from C ; Raise an exception from C
@ -226,68 +248,73 @@ L110:
PUBLIC _caml_raise_exception PUBLIC _caml_raise_exception
ALIGN 4 ALIGN 4
_caml_raise_exception: _caml_raise_exception:
test _caml_backtrace_active, 1 mov ebx, _Caml_state
Load_backtrace_active ebx, ecx
test ecx, 1
jne L112 jne L112
mov eax, [esp+4] mov eax, [esp+8]
mov esp, _caml_exception_pointer Load_exception_pointer ebx, esp
pop _caml_exception_pointer Pop_exception_pointer ebx
ret ret
L112: L112:
mov esi, [esp+4] ; Save exception bucket in esi mov esi, [esp+8] ; Save exception bucket in esi
push _caml_exception_pointer ; arg 4: SP of handler Push_exception_pointer ebx ; arg 4: SP of handler
push _caml_bottom_of_stack ; arg 3: SP of raise Push_bottom_of_stack ebx ; arg 3: SP of raise
push _caml_last_return_address ; arg 2: PC of raise Push_last_return_address ebx ; arg 2: PC of raise
push esi ; arg 1: exception bucket push esi ; arg 1: exception bucket
call _caml_stash_backtrace call _caml_stash_backtrace
mov eax, esi ; recover exception bucket mov eax, esi ; recover exception bucket
mov esp, _caml_exception_pointer ; cut the stack Load_exception_pointer ebx, esp ; cut the stack
pop _caml_exception_pointer Pop_exception_pointer ebx
ret ret
; Callback from C to OCaml ; Callback from C to OCaml
PUBLIC _caml_callback_exn PUBLIC _caml_callback_asm
ALIGN 4 ALIGN 4
_caml_callback_exn: _caml_callback_asm:
; Save callee-save registers ; Save callee-save registers
push ebx push ebx
push esi push esi
push edi push edi
push ebp push ebp
; Initial loading of arguments ; Initial loading of arguments
mov ebx, [esp+20] ; closure mov ebx, [esp+24] ; arg2: closure
mov eax, [esp+24] ; argument mov edi, [esp+28] ; arguments array
mov eax, [edi] ; arg1: argument
mov esi, [ebx] ; code pointer mov esi, [ebx] ; code pointer
jmp L106 jmp L106
PUBLIC _caml_callback2_exn PUBLIC _caml_callback2_asm
ALIGN 4 ALIGN 4
_caml_callback2_exn: _caml_callback2_asm:
; Save callee-save registers ; Save callee-save registers
push ebx push ebx
push esi push esi
push edi push edi
push ebp push ebp
; Initial loading of arguments ; Initial loading of arguments
mov ecx, [esp+20] ; closure mov ecx, [esp+24] ; arg3: closure
mov eax, [esp+24] ; first argument mov edi, [esp+28] ; arguments array
mov ebx, [esp+28] ; second argument mov eax, [edi] ; arg1: first argument
mov ebx, [edi+4] ; arg2: second argument
mov esi, offset _caml_apply2 ; code pointer mov esi, offset _caml_apply2 ; code pointer
jmp L106 jmp L106
PUBLIC _caml_callback3_exn PUBLIC _caml_callback3_asm
ALIGN 4 ALIGN 4
_caml_callback3_exn: _caml_callback3_asm:
; Save callee-save registers ; Save callee-save registers
push ebx push ebx
push esi push esi
push edi push edi
push ebp push ebp
; Initial loading of arguments ; Initial loading of arguments
mov edx, [esp+20] ; closure mov edx, [esp+24] ; arg4: closure
mov eax, [esp+24] ; first argument mov edi, [esp+28] ; arguments array
mov ebx, [esp+28] ; second argument mov eax, [edi] ; arg1: first argument
mov ecx, [esp+32] ; third argument mov ebx, [edi+4] ; arg2: second argument
mov ecx, [edi+8] ; arg3: third argument
mov esi, offset _caml_apply3 ; code pointer mov esi, offset _caml_apply3 ; code pointer
jmp L106 jmp L106
@ -190,9 +190,10 @@ caml_trace_value_file (value v, code_t prog, asize_t proglen, FILE * f)
fprintf (f, "=code@%ld", (long) ((code_t) v - prog)); fprintf (f, "=code@%ld", (long) ((code_t) v - prog));
else if (Is_long (v)) else if (Is_long (v))
fprintf (f, "=long%" ARCH_INTNAT_PRINTF_FORMAT "d", Long_val (v)); fprintf (f, "=long%" ARCH_INTNAT_PRINTF_FORMAT "d", Long_val (v));
else if ((void*)v >= (void*)caml_stack_low else if ((void*)v >= (void*)Caml_state->stack_low
&& (void*)v < (void*)caml_stack_high) && (void*)v < (void*)Caml_state->stack_high)
fprintf (f, "=stack_%ld", (long) ((intnat*)caml_stack_high - (intnat*)v)); fprintf (f, "=stack_%ld",
(long) ((intnat*)Caml_state->stack_high - (intnat*)v));
else if (Is_block (v)) { else if (Is_block (v)) {
int s = Wosize_val (v); int s = Wosize_val (v);
int tg = Tag_val (v); int tg = Tag_val (v);
@ -256,10 +257,11 @@ caml_trace_accu_sp_file (value accu, value * sp, code_t prog, asize_t proglen,
fprintf (f, "accu="); fprintf (f, "accu=");
caml_trace_value_file (accu, prog, proglen, f); caml_trace_value_file (accu, prog, proglen, f);
fprintf (f, "\n sp=%#" ARCH_INTNAT_PRINTF_FORMAT "x @%ld:", fprintf (f, "\n sp=%#" ARCH_INTNAT_PRINTF_FORMAT "x @%ld:",
(intnat) sp, (long) (caml_stack_high - sp)); (intnat) sp, (long) (Caml_state->stack_high - sp));
for (p = sp, i = 0; i < 12 + (1 << caml_trace_level) && p < caml_stack_high; for (p = sp, i = 0;
i < 12 + (1 << caml_trace_level) && p < Caml_state->stack_high;
p++, i++) { p++, i++) {
fprintf (f, "\n[%ld] ", (long) (caml_stack_high - p)); fprintf (f, "\n[%ld] ", (long) (Caml_state->stack_high - p));
caml_trace_value_file (*p, prog, proglen, f); caml_trace_value_file (*p, prog, proglen, f);
}; };
putc ('\n', f); putc ('\n', f);
@ -573,7 +573,7 @@ static void intern_rec(value *dest)
if (ops->finalize != NULL && Is_young(v)) { if (ops->finalize != NULL && Is_young(v)) {
/* Remember that the block has a finalizer. */ /* Remember that the block has a finalizer. */
add_to_custom_table (&caml_custom_table, v, 0, 1); add_to_custom_table (Caml_state->custom_table, v, 0, 1);
} }
intern_dest += 1 + size; intern_dest += 1 + size;
@ -40,10 +40,10 @@
sp the stack pointer (grows downward) sp the stack pointer (grows downward)
accu the accumulator accu the accumulator
env heap-allocated environment env heap-allocated environment
caml_trapsp pointer to the current trap frame Caml_state->trapsp pointer to the current trap frame
extra_args number of extra arguments provided by the caller extra_args number of extra arguments provided by the caller
sp is a local copy of the global variable caml_extern_sp. */ sp is a local copy of the global variable Caml_state->extern_sp. */
/* Instruction decoding */ /* Instruction decoding */
@ -70,13 +70,13 @@ sp is a local copy of the global variable caml_extern_sp. */
#undef Alloc_small_origin #undef Alloc_small_origin
#define Alloc_small_origin CAML_FROM_CAML #define Alloc_small_origin CAML_FROM_CAML
#define Setup_for_gc \ #define Setup_for_gc \
{ sp -= 2; sp[0] = accu; sp[1] = env; caml_extern_sp = sp; } { sp -= 2; sp[0] = accu; sp[1] = env; Caml_state->extern_sp = sp; }
#define Restore_after_gc \ #define Restore_after_gc \
{ accu = sp[0]; env = sp[1]; sp += 2; } { accu = sp[0]; env = sp[1]; sp += 2; }
#define Setup_for_c_call \ #define Setup_for_c_call \
{ saved_pc = pc; *--sp = env; caml_extern_sp = sp; } { saved_pc = pc; *--sp = env; Caml_state->extern_sp = sp; }
#define Restore_after_c_call \ #define Restore_after_c_call \
{ sp = caml_extern_sp; env = *sp++; saved_pc = NULL; } { sp = Caml_state->extern_sp; env = *sp++; saved_pc = NULL; }
/* An event frame must look like accu + a C_CALL frame + a RETURN 1 frame */ /* An event frame must look like accu + a C_CALL frame + a RETURN 1 frame */
#define Setup_for_event \ #define Setup_for_event \
@ -87,9 +87,9 @@ sp is a local copy of the global variable caml_extern_sp. */
sp[3] = (value) pc; /* RETURN frame: saved return address */ \ sp[3] = (value) pc; /* RETURN frame: saved return address */ \
sp[4] = env; /* RETURN frame: saved environment */ \ sp[4] = env; /* RETURN frame: saved environment */ \
sp[5] = Val_long(extra_args); /* RETURN frame: saved extra args */ \ sp[5] = Val_long(extra_args); /* RETURN frame: saved extra args */ \
caml_extern_sp = sp; } Caml_state->extern_sp = sp; }
#define Restore_after_event \ #define Restore_after_event \
{ sp = caml_extern_sp; accu = sp[0]; \ { sp = Caml_state->extern_sp; accu = sp[0]; \
pc = (code_t) sp[3]; env = sp[4]; extra_args = Long_val(sp[5]); \ pc = (code_t) sp[3]; env = sp[4]; extra_args = Long_val(sp[5]); \
sp += 6; } sp += 6; }
@ -99,7 +99,7 @@ sp is a local copy of the global variable caml_extern_sp. */
{ sp -= 4; \ { sp -= 4; \
sp[0] = accu; sp[1] = (value)(pc - 1); \ sp[0] = accu; sp[1] = (value)(pc - 1); \
sp[2] = env; sp[3] = Val_long(extra_args); \ sp[2] = env; sp[3] = Val_long(extra_args); \
caml_extern_sp = sp; } Caml_state->extern_sp = sp; }
#define Restore_after_debugger { sp += 4; } #define Restore_after_debugger { sp += 4; }
#ifdef THREADED_CODE #ifdef THREADED_CODE
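
An illustrative sketch (assuming CAML_INTERNALS; the callee name is hypothetical) of the discipline the Setup/Restore pairs above implement: before control can leave the interpreter, the cached local stack pointer must be published in Caml_state->extern_sp, and after a C call it must be re-read, because the callee may have reallocated and moved the bytecode stack.

#define CAML_INTERNALS
#include <caml/mlvalues.h>
#include <caml/domain_state.h>

extern void some_primitive_that_may_move_the_stack(void);  /* hypothetical */

static void around_c_call(value *sp, value env)
{
  /* Setup_for_c_call: save env on the OCaml stack, publish sp */
  *--sp = env;
  Caml_state->extern_sp = sp;

  some_primitive_that_may_move_the_stack();

  /* Restore_after_c_call: re-read sp before popping env back */
  sp = Caml_state->extern_sp;
  env = *sp++;
  (void) env;
}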
@ -242,24 +242,25 @@ value caml_interprete(code_t prog, asize_t prog_size)
#if defined(THREADED_CODE) && defined(ARCH_SIXTYFOUR) && !defined(ARCH_CODE32) #if defined(THREADED_CODE) && defined(ARCH_SIXTYFOUR) && !defined(ARCH_CODE32)
jumptbl_base = Jumptbl_base; jumptbl_base = Jumptbl_base;
#endif #endif
initial_local_roots = caml_local_roots; initial_local_roots = Caml_state->local_roots;
initial_sp_offset = (char *) caml_stack_high - (char *) caml_extern_sp; initial_sp_offset =
initial_external_raise = caml_external_raise; (char *) Caml_state->stack_high - (char *) Caml_state->extern_sp;
initial_external_raise = Caml_state->external_raise;
caml_callback_depth++; caml_callback_depth++;
saved_pc = NULL; saved_pc = NULL;
if (sigsetjmp(raise_buf.buf, 0)) { if (sigsetjmp(raise_buf.buf, 0)) {
caml_local_roots = initial_local_roots; Caml_state->local_roots = initial_local_roots;
sp = caml_extern_sp; sp = Caml_state->extern_sp;
accu = caml_exn_bucket; accu = Caml_state->exn_bucket;
pc = saved_pc; saved_pc = NULL; pc = saved_pc; saved_pc = NULL;
if (pc != NULL) pc += 2; if (pc != NULL) pc += 2;
/* +2 adjustment for the sole purpose of backtraces */ /* +2 adjustment for the sole purpose of backtraces */
goto raise_exception; goto raise_exception;
} }
caml_external_raise = &raise_buf; Caml_state->external_raise = &raise_buf;
sp = caml_extern_sp; sp = Caml_state->extern_sp;
pc = prog; pc = prog;
extra_args = 0; extra_args = 0;
env = Atom(0); env = Atom(0);
@ -269,8 +270,8 @@ value caml_interprete(code_t prog, asize_t prog_size)
#ifdef DEBUG #ifdef DEBUG
next_instr: next_instr:
if (caml_icount-- == 0) caml_stop_here (); if (caml_icount-- == 0) caml_stop_here ();
CAMLassert(sp >= caml_stack_low); CAMLassert(sp >= Caml_state->stack_low);
CAMLassert(sp <= caml_stack_high); CAMLassert(sp <= Caml_state->stack_high);
#endif #endif
goto *(void *)(jumptbl_base + *pc++); /* Jump to the first instruction */ goto *(void *)(jumptbl_base + *pc++); /* Jump to the first instruction */
#else #else
@ -288,8 +289,8 @@ value caml_interprete(code_t prog, asize_t prog_size)
caml_trace_accu_sp_file(accu,sp,prog,prog_size,stdout); caml_trace_accu_sp_file(accu,sp,prog,prog_size,stdout);
fflush(stdout); fflush(stdout);
}; };
CAMLassert(sp >= caml_stack_low); CAMLassert(sp >= Caml_state->stack_low);
CAMLassert(sp <= caml_stack_high); CAMLassert(sp <= Caml_state->stack_high);
#endif #endif
curr_instr = *pc++; curr_instr = *pc++;
@ -825,10 +826,10 @@ value caml_interprete(code_t prog, asize_t prog_size)
Instruct(PUSHTRAP): Instruct(PUSHTRAP):
sp -= 4; sp -= 4;
Trap_pc(sp) = pc + *pc; Trap_pc(sp) = pc + *pc;
Trap_link(sp) = caml_trapsp; Trap_link(sp) = Caml_state->trapsp;
sp[2] = env; sp[2] = env;
sp[3] = Val_long(extra_args); sp[3] = Val_long(extra_args);
caml_trapsp = sp; Caml_state->trapsp = sp;
pc++; pc++;
Next; Next;
@ -840,38 +841,38 @@ value caml_interprete(code_t prog, asize_t prog_size)
pc--; /* restart the POPTRAP after processing the signal */ pc--; /* restart the POPTRAP after processing the signal */
goto process_signal; goto process_signal;
} }
caml_trapsp = Trap_link(sp); Caml_state->trapsp = Trap_link(sp);
sp += 4; sp += 4;
Next; Next;
Instruct(RAISE_NOTRACE): Instruct(RAISE_NOTRACE):
if (caml_trapsp >= caml_trap_barrier) if (Caml_state->trapsp >= Caml_state->trap_barrier)
caml_debugger(TRAP_BARRIER, Val_unit); caml_debugger(TRAP_BARRIER, Val_unit);
goto raise_notrace; goto raise_notrace;
Instruct(RERAISE): Instruct(RERAISE):
if (caml_trapsp >= caml_trap_barrier) if (Caml_state->trapsp >= Caml_state->trap_barrier)
caml_debugger(TRAP_BARRIER, Val_unit); caml_debugger(TRAP_BARRIER, Val_unit);
if (caml_backtrace_active) caml_stash_backtrace(accu, pc, sp, 1); if (Caml_state->backtrace_active) caml_stash_backtrace(accu, pc, sp, 1);
goto raise_notrace; goto raise_notrace;
Instruct(RAISE): Instruct(RAISE):
raise_exception: raise_exception:
if (caml_trapsp >= caml_trap_barrier) if (Caml_state->trapsp >= Caml_state->trap_barrier)
caml_debugger(TRAP_BARRIER, Val_unit); caml_debugger(TRAP_BARRIER, Val_unit);
if (caml_backtrace_active) caml_stash_backtrace(accu, pc, sp, 0); if (Caml_state->backtrace_active) caml_stash_backtrace(accu, pc, sp, 0);
raise_notrace: raise_notrace:
if ((char *) caml_trapsp if ((char *) Caml_state->trapsp
>= (char *) caml_stack_high - initial_sp_offset) { >= (char *) Caml_state->stack_high - initial_sp_offset) {
caml_external_raise = initial_external_raise; Caml_state->external_raise = initial_external_raise;
caml_extern_sp = (value *) ((char *) caml_stack_high Caml_state->extern_sp = (value *) ((char *) Caml_state->stack_high
- initial_sp_offset); - initial_sp_offset);
caml_callback_depth--; caml_callback_depth--;
return Make_exception_result(accu); return Make_exception_result(accu);
} }
sp = caml_trapsp; sp = Caml_state->trapsp;
pc = Trap_pc(sp); pc = Trap_pc(sp);
caml_trapsp = Trap_link(sp); Caml_state->trapsp = Trap_link(sp);
env = sp[2]; env = sp[2];
extra_args = Long_val(sp[3]); extra_args = Long_val(sp[3]);
sp += 4; sp += 4;
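The PUSHTRAP/RAISE hunks above manipulate a linked list of four-word trap frames on the OCaml stack; the change is only where the head of that list lives (Caml_state->trapsp instead of the global caml_trapsp). A standalone C sketch of the frame layout, as it can be read off PUSHTRAP (the slots used by Trap_pc and Trap_link are assumptions; the runtime's stack header is the authority):

    /* Sketch only: stand-in types, slot order for pc/link assumed. */
    typedef long sketch_value;          /* stand-in for the runtime's [value] */
    typedef const int *sketch_code_ptr; /* stand-in for [code_t]              */

    struct trap_frame_sketch {
      sketch_code_ptr pc;         /* Trap_pc(sp): handler, set to pc + *pc      */
      sketch_value   *link;       /* Trap_link(sp): previous Caml_state->trapsp */
      sketch_value    env;        /* sp[2]: saved environment                   */
      sketch_value    extra_args; /* sp[3]: saved extra_args (tagged integer)   */
    };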
@ -880,10 +881,10 @@ value caml_interprete(code_t prog, asize_t prog_size)
/* Stack checks */ /* Stack checks */
check_stacks: check_stacks:
if (sp < caml_stack_threshold) { if (sp < Caml_state->stack_threshold) {
caml_extern_sp = sp; Caml_state->extern_sp = sp;
caml_realloc_stack(Stack_threshold / sizeof(value)); caml_realloc_stack(Stack_threshold / sizeof(value));
sp = caml_extern_sp; sp = Caml_state->extern_sp;
} }
/* Fall through CHECK_SIGNALS */ /* Fall through CHECK_SIGNALS */
@ -1125,8 +1126,8 @@ value caml_interprete(code_t prog, asize_t prog_size)
/* Debugging and machine control */ /* Debugging and machine control */
Instruct(STOP): Instruct(STOP):
caml_external_raise = initial_external_raise; Caml_state->external_raise = initial_external_raise;
caml_extern_sp = sp; Caml_state->extern_sp = sp;
caml_callback_depth--; caml_callback_depth--;
return accu; return accu;
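The substitution repeated throughout this file, and the rest of the patch, is mechanical: each former C global such as caml_extern_sp or caml_trapsp becomes a field of a per-domain structure reached through Caml_state. A minimal, self-contained sketch of that shape; the field names are taken from the hunks above, while the types, the structure name and the way the pointer is obtained are assumptions (the real structure is generated from caml/domain_state.tbl):

    /* Hedged sketch, not the generated runtime structure. */
    typedef long sketch_value;

    struct domain_state_sketch {
      sketch_value *extern_sp;      /* bytecode stack pointer saved across C calls */
      sketch_value *trapsp;         /* head of the trap-frame chain                */
      sketch_value *stack_low, *stack_high, *stack_threshold;
      sketch_value *young_ptr, *young_limit;
      sketch_value  exn_bucket;
      int           backtrace_active;
      /* ... one field per former global ... */
    };

    /* A single static pointer stands in for the real Caml_state here,
       so that code reads as in the hunks above, e.g.
         sp = state->extern_sp;  */
    static struct domain_state_sketch *state;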
View File
@ -125,7 +125,7 @@ static void realloc_gray_vals (void)
value *new; value *new;
CAMLassert (gray_vals_cur == gray_vals_end); CAMLassert (gray_vals_cur == gray_vals_end);
if (gray_vals_size < caml_stat_heap_wsz / 32){ if (gray_vals_size < Caml_state->stat_heap_wsz / 32){
caml_gc_message (0x08, "Growing gray_vals to %" caml_gc_message (0x08, "Growing gray_vals to %"
ARCH_INTNAT_PRINTF_FORMAT "uk bytes\n", ARCH_INTNAT_PRINTF_FORMAT "uk bytes\n",
(intnat) gray_vals_size * sizeof (value) / 512); (intnat) gray_vals_size * sizeof (value) / 512);
@ -270,10 +270,10 @@ static inline value* mark_slice_darken(value *gray_vals_ptr,
/* The variable child is not changed because it must be marked alive */ /* The variable child is not changed because it must be marked alive */
Field (v, i) = f; Field (v, i) = f;
if (Is_block (f) && Is_young (f) && !Is_young (child)){ if (Is_block (f) && Is_young (f) && !Is_young (child)){
if(in_ephemeron){ if(in_ephemeron) {
add_to_ephe_ref_table (&caml_ephe_ref_table, v, i); add_to_ephe_ref_table (Caml_state->ephe_ref_table, v, i);
}else{ } else {
add_to_ref_table (&caml_ref_table, &Field (v, i)); add_to_ref_table (Caml_state->ref_table, &Field (v, i));
} }
} }
} }
@ -582,7 +582,7 @@ static void sweep_slice (intnat work)
chunk = Chunk_next (chunk); chunk = Chunk_next (chunk);
if (chunk == NULL){ if (chunk == NULL){
/* Sweeping is done. */ /* Sweeping is done. */
++ caml_stat_major_collections; ++ Caml_state->stat_major_collections;
work = 0; work = 0;
caml_gc_phase = Phase_idle; caml_gc_phase = Phase_idle;
caml_request_minor_gc (); caml_request_minor_gc ();
@ -627,7 +627,7 @@ void caml_major_collection_slice (intnat howmuch)
int i; int i;
/* /*
Free memory at the start of the GC cycle (garbage + free list) (assumed): Free memory at the start of the GC cycle (garbage + free list) (assumed):
FM = caml_stat_heap_wsz * caml_percent_free FM = Caml_state->stat_heap_wsz * caml_percent_free
/ (100 + caml_percent_free) / (100 + caml_percent_free)
Assuming steady state and enforcing a constant allocation rate, then Assuming steady state and enforcing a constant allocation rate, then
@ -639,7 +639,7 @@ void caml_major_collection_slice (intnat howmuch)
Proportion of G consumed since the previous slice: Proportion of G consumed since the previous slice:
PH = caml_allocated_words / G PH = caml_allocated_words / G
= caml_allocated_words * 3 * (100 + caml_percent_free) = caml_allocated_words * 3 * (100 + caml_percent_free)
/ (2 * caml_stat_heap_wsz * caml_percent_free) / (2 * Caml_state->stat_heap_wsz * caml_percent_free)
Proportion of extra-heap resources consumed since the previous slice: Proportion of extra-heap resources consumed since the previous slice:
PE = caml_extra_heap_resources PE = caml_extra_heap_resources
Proportion of total work to do in this slice: Proportion of total work to do in this slice:
@ -650,10 +650,10 @@ void caml_major_collection_slice (intnat howmuch)
the P above. the P above.
Amount of marking work for the GC cycle: Amount of marking work for the GC cycle:
MW = caml_stat_heap_wsz * 100 / (100 + caml_percent_free) MW = Caml_state->stat_heap_wsz * 100 / (100 + caml_percent_free)
+ caml_incremental_roots_count + caml_incremental_roots_count
Amount of sweeping work for the GC cycle: Amount of sweeping work for the GC cycle:
SW = caml_stat_heap_wsz SW = Caml_state->stat_heap_wsz
In order to finish marking with a non-empty free list, we will In order to finish marking with a non-empty free list, we will
use 40% of the time for marking, and 60% for sweeping. use 40% of the time for marking, and 60% for sweeping.
@ -673,11 +673,12 @@ void caml_major_collection_slice (intnat howmuch)
Amount of marking work for a marking slice: Amount of marking work for a marking slice:
MS = P * MW / (40/100) MS = P * MW / (40/100)
MS = P * (caml_stat_heap_wsz * 250 / (100 + caml_percent_free) MS = P * (Caml_state->stat_heap_wsz * 250
/ (100 + caml_percent_free)
+ 2.5 * caml_incremental_roots_count) + 2.5 * caml_incremental_roots_count)
Amount of sweeping work for a sweeping slice: Amount of sweeping work for a sweeping slice:
SS = P * SW / (60/100) SS = P * SW / (60/100)
SS = P * caml_stat_heap_wsz * 5 / 3 SS = P * Caml_state->stat_heap_wsz * 5 / 3
This slice will either mark MS words or sweep SS words. This slice will either mark MS words or sweep SS words.
*/ */
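To make the slice formulas above concrete, here is a small, self-contained calculation under assumed inputs (a heap of 2^20 words, the default caml_percent_free of 80, 10,000 incremental roots and 50,000 words allocated since the last slice). The constants mirror the comment; none of this code is taken from the runtime:

    #include <stdio.h>

    /* Worked example of the slice formulas quoted in the comment above. */
    int main(void)
    {
      double heap_wsz        = 1 << 20; /* Caml_state->stat_heap_wsz (assumed)    */
      double percent_free    = 80;      /* caml_percent_free (default)            */
      double allocated_words = 50000;   /* caml_allocated_words since last slice  */
      double incr_roots      = 10000;   /* caml_incremental_roots_count (assumed) */

      /* PH = caml_allocated_words * 3 * (100 + pf) / (2 * heap * pf) */
      double p  = allocated_words * 3.0 * (100 + percent_free)
                  / heap_wsz / percent_free / 2.0;

      /* MS = P * (heap * 250 / (100 + pf) + 2.5 * roots) */
      double ms = p * (heap_wsz * 250.0 / (100 + percent_free)
                       + 2.5 * incr_roots);

      /* SS = P * heap * 5 / 3 */
      double ss = p * heap_wsz * 5.0 / 3.0;

      printf("p = %g, marking slice = %g words, sweeping slice = %g words\n",
             p, ms, ss);
      return 0;
    }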
@ -686,7 +687,7 @@ void caml_major_collection_slice (intnat howmuch)
CAML_INSTR_SETUP (tmr, "major"); CAML_INSTR_SETUP (tmr, "major");
p = (double) caml_allocated_words * 3.0 * (100 + caml_percent_free) p = (double) caml_allocated_words * 3.0 * (100 + caml_percent_free)
/ caml_stat_heap_wsz / caml_percent_free / 2.0; / Caml_state->stat_heap_wsz / caml_percent_free / 2.0;
if (caml_dependent_size > 0){ if (caml_dependent_size > 0){
dp = (double) caml_dependent_allocated * (100 + caml_percent_free) dp = (double) caml_dependent_allocated * (100 + caml_percent_free)
/ caml_dependent_size / caml_percent_free; / caml_dependent_size / caml_percent_free;
@ -752,7 +753,7 @@ void caml_major_collection_slice (intnat howmuch)
}else{ }else{
/* manual setting */ /* manual setting */
filt_p = (double) howmuch * 3.0 * (100 + caml_percent_free) filt_p = (double) howmuch * 3.0 * (100 + caml_percent_free)
/ caml_stat_heap_wsz / caml_percent_free / 2.0; / Caml_state->stat_heap_wsz / caml_percent_free / 2.0;
} }
caml_major_work_credit += filt_p; caml_major_work_credit += filt_p;
} }
@ -764,7 +765,7 @@ void caml_major_collection_slice (intnat howmuch)
(intnat) (p * 1000000)); (intnat) (p * 1000000));
if (caml_gc_phase == Phase_idle){ if (caml_gc_phase == Phase_idle){
if (caml_young_ptr == caml_young_alloc_end){ if (Caml_state->young_ptr == Caml_state->young_alloc_end){
/* We can only start a major GC cycle if the minor allocation arena /* We can only start a major GC cycle if the minor allocation arena
is empty, otherwise we'd have to treat it as a set of roots. */ is empty, otherwise we'd have to treat it as a set of roots. */
start_cycle (); start_cycle ();
@ -780,11 +781,11 @@ void caml_major_collection_slice (intnat howmuch)
} }
if (caml_gc_phase == Phase_mark || caml_gc_phase == Phase_clean){ if (caml_gc_phase == Phase_mark || caml_gc_phase == Phase_clean){
computed_work = (intnat) (p * ((double) caml_stat_heap_wsz * 250 computed_work = (intnat) (p * ((double) Caml_state->stat_heap_wsz * 250
/ (100 + caml_percent_free) / (100 + caml_percent_free)
+ caml_incremental_roots_count)); + caml_incremental_roots_count));
}else{ }else{
computed_work = (intnat) (p * caml_stat_heap_wsz * 5 / 3); computed_work = (intnat) (p * Caml_state->stat_heap_wsz * 5 / 3);
} }
caml_gc_message (0x40, "computed work = %" caml_gc_message (0x40, "computed work = %"
ARCH_INTNAT_PRINTF_FORMAT "d words\n", computed_work); ARCH_INTNAT_PRINTF_FORMAT "d words\n", computed_work);
@ -825,7 +826,7 @@ void caml_major_collection_slice (intnat howmuch)
for (i = 0; i < caml_major_window; i++) caml_major_ring[i] += p; for (i = 0; i < caml_major_window; i++) caml_major_ring[i] += p;
} }
caml_stat_major_words += caml_allocated_words; Caml_state->stat_major_words += caml_allocated_words;
caml_allocated_words = 0; caml_allocated_words = 0;
caml_dependent_allocated = 0; caml_dependent_allocated = 0;
caml_extra_heap_resources = 0.0; caml_extra_heap_resources = 0.0;
@ -847,7 +848,7 @@ void caml_finish_major_cycle (void)
CAMLassert (caml_gc_phase == Phase_sweep); CAMLassert (caml_gc_phase == Phase_sweep);
while (caml_gc_phase == Phase_sweep) sweep_slice (LONG_MAX); while (caml_gc_phase == Phase_sweep) sweep_slice (LONG_MAX);
CAMLassert (caml_gc_phase == Phase_idle); CAMLassert (caml_gc_phase == Phase_idle);
caml_stat_major_words += caml_allocated_words; Caml_state->stat_major_words += caml_allocated_words;
caml_allocated_words = 0; caml_allocated_words = 0;
} }
@ -863,7 +864,7 @@ asize_t caml_clip_heap_chunk_wsz (asize_t wsz)
if (caml_major_heap_increment > 1000){ if (caml_major_heap_increment > 1000){
incr = caml_major_heap_increment; incr = caml_major_heap_increment;
}else{ }else{
incr = caml_stat_heap_wsz / 100 * caml_major_heap_increment; incr = Caml_state->stat_heap_wsz / 100 * caml_major_heap_increment;
} }
if (result < incr){ if (result < incr){
@ -880,27 +881,28 @@ void caml_init_major_heap (asize_t heap_size)
{ {
int i; int i;
caml_stat_heap_wsz = caml_clip_heap_chunk_wsz (Wsize_bsize (heap_size)); Caml_state->stat_heap_wsz =
caml_stat_top_heap_wsz = caml_stat_heap_wsz; caml_clip_heap_chunk_wsz (Wsize_bsize (heap_size));
CAMLassert (Bsize_wsize (caml_stat_heap_wsz) % Page_size == 0); Caml_state->stat_top_heap_wsz = Caml_state->stat_heap_wsz;
CAMLassert (Bsize_wsize (Caml_state->stat_heap_wsz) % Page_size == 0);
caml_heap_start = caml_heap_start =
(char *) caml_alloc_for_heap (Bsize_wsize (caml_stat_heap_wsz)); (char *) caml_alloc_for_heap (Bsize_wsize (Caml_state->stat_heap_wsz));
if (caml_heap_start == NULL) if (caml_heap_start == NULL)
caml_fatal_error ("cannot allocate initial major heap"); caml_fatal_error ("cannot allocate initial major heap");
Chunk_next (caml_heap_start) = NULL; Chunk_next (caml_heap_start) = NULL;
caml_stat_heap_wsz = Wsize_bsize (Chunk_size (caml_heap_start)); Caml_state->stat_heap_wsz = Wsize_bsize (Chunk_size (caml_heap_start));
caml_stat_heap_chunks = 1; Caml_state->stat_heap_chunks = 1;
caml_stat_top_heap_wsz = caml_stat_heap_wsz; Caml_state->stat_top_heap_wsz = Caml_state->stat_heap_wsz;
if (caml_page_table_add(In_heap, caml_heap_start, if (caml_page_table_add(In_heap, caml_heap_start,
caml_heap_start + Bsize_wsize (caml_stat_heap_wsz)) caml_heap_start + Bsize_wsize (Caml_state->stat_heap_wsz))
!= 0) { != 0) {
caml_fatal_error ("cannot allocate initial page table"); caml_fatal_error ("cannot allocate initial page table");
} }
caml_fl_init_merge (); caml_fl_init_merge ();
caml_make_free_blocks ((value *) caml_heap_start, caml_make_free_blocks ((value *) caml_heap_start,
caml_stat_heap_wsz, 1, Caml_white); Caml_state->stat_heap_wsz, 1, Caml_white);
caml_gc_phase = Phase_idle; caml_gc_phase = Phase_idle;
gray_vals_size = 2048; gray_vals_size = 2048;
gray_vals = (value *) caml_stat_alloc_noexc (gray_vals_size * sizeof (value)); gray_vals = (value *) caml_stat_alloc_noexc (gray_vals_size * sizeof (value));
View File
@ -335,7 +335,7 @@ int caml_add_to_heap (char *m)
caml_gc_message (0x04, "Growing heap to %" caml_gc_message (0x04, "Growing heap to %"
ARCH_INTNAT_PRINTF_FORMAT "uk bytes\n", ARCH_INTNAT_PRINTF_FORMAT "uk bytes\n",
(Bsize_wsize (caml_stat_heap_wsz) + Chunk_size (m)) / 1024); (Bsize_wsize (Caml_state->stat_heap_wsz) + Chunk_size (m)) / 1024);
/* Register block in page table */ /* Register block in page table */
if (caml_page_table_add(In_heap, m, m + Chunk_size(m)) != 0) if (caml_page_table_add(In_heap, m, m + Chunk_size(m)) != 0)
@ -353,12 +353,12 @@ int caml_add_to_heap (char *m)
Chunk_next (m) = cur; Chunk_next (m) = cur;
*last = m; *last = m;
++ caml_stat_heap_chunks; ++ Caml_state->stat_heap_chunks;
} }
caml_stat_heap_wsz += Wsize_bsize (Chunk_size (m)); Caml_state->stat_heap_wsz += Wsize_bsize (Chunk_size (m));
if (caml_stat_heap_wsz > caml_stat_top_heap_wsz){ if (Caml_state->stat_heap_wsz > Caml_state->stat_top_heap_wsz){
caml_stat_top_heap_wsz = caml_stat_heap_wsz; Caml_state->stat_top_heap_wsz = Caml_state->stat_heap_wsz;
} }
return 0; return 0;
} }
@ -437,10 +437,10 @@ void caml_shrink_heap (char *chunk)
*/ */
if (chunk == caml_heap_start) return; if (chunk == caml_heap_start) return;
caml_stat_heap_wsz -= Wsize_bsize (Chunk_size (chunk)); Caml_state->stat_heap_wsz -= Wsize_bsize (Chunk_size (chunk));
caml_gc_message (0x04, "Shrinking heap to %" caml_gc_message (0x04, "Shrinking heap to %"
ARCH_INTNAT_PRINTF_FORMAT "uk words\n", ARCH_INTNAT_PRINTF_FORMAT "uk words\n",
caml_stat_heap_wsz / 1024); Caml_state->stat_heap_wsz / 1024);
#ifdef DEBUG #ifdef DEBUG
{ {
@ -451,7 +451,7 @@ void caml_shrink_heap (char *chunk)
} }
#endif #endif
-- caml_stat_heap_chunks; -- Caml_state->stat_heap_chunks;
/* Remove [chunk] from the list of chunks. */ /* Remove [chunk] from the list of chunks. */
cp = &caml_heap_start; cp = &caml_heap_start;
@ -496,7 +496,7 @@ static inline value caml_alloc_shr_aux (mlsize_t wosize, tag_t tag, int track,
if (new_block == NULL) { if (new_block == NULL) {
if (!raise_oom) if (!raise_oom)
return 0; return 0;
else if (caml_in_minor_collection) else if (Caml_state->in_minor_collection)
caml_fatal_error ("out of memory"); caml_fatal_error ("out of memory");
else else
caml_raise_out_of_memory (); caml_raise_out_of_memory ();
@ -521,7 +521,7 @@ static inline value caml_alloc_shr_aux (mlsize_t wosize, tag_t tag, int track,
== Make_header_with_profinfo (wosize, tag, caml_allocation_color (hp), == Make_header_with_profinfo (wosize, tag, caml_allocation_color (hp),
profinfo)); profinfo));
caml_allocated_words += Whsize_wosize (wosize); caml_allocated_words += Whsize_wosize (wosize);
if (caml_allocated_words > caml_minor_heap_wsz){ if (caml_allocated_words > Caml_state->minor_heap_wsz){
CAML_INSTR_INT ("request_major/alloc_shr@", 1); CAML_INSTR_INT ("request_major/alloc_shr@", 1);
caml_request_major_slice (); caml_request_major_slice ();
} }
@ -648,7 +648,7 @@ CAMLexport CAMLweakdef void caml_initialize (value *fp, value val)
CAMLassert(Is_in_heap_or_young(fp)); CAMLassert(Is_in_heap_or_young(fp));
*fp = val; *fp = val;
if (!Is_young((value)fp) && Is_block (val) && Is_young (val)) { if (!Is_young((value)fp) && Is_block (val) && Is_young (val)) {
add_to_ref_table (&caml_ref_table, fp); add_to_ref_table (Caml_state->ref_table, fp);
} }
} }
@ -701,7 +701,7 @@ CAMLexport CAMLweakdef void caml_modify (value *fp, value val)
} }
/* Check for condition 1. */ /* Check for condition 1. */
if (Is_block(val) && Is_young(val)) { if (Is_block(val) && Is_young(val)) {
add_to_ref_table (&caml_ref_table, fp); add_to_ref_table (Caml_state->ref_table, fp);
} }
} }
} }
View File
@ -45,9 +45,9 @@ static intnat callstack_size = 0;
static value memprof_callback = Val_unit; static value memprof_callback = Val_unit;
/* Pointer to the word following the next sample in the minor /* Pointer to the word following the next sample in the minor
heap. Equals [caml_young_alloc_start] if no sampling is planned in heap. Equals [Caml_state->young_alloc_start] if no sampling is planned in
the current minor heap. the current minor heap.
Invariant: [caml_memprof_young_trigger <= caml_young_ptr]. Invariant: [caml_memprof_young_trigger <= Caml_state->young_ptr].
*/ */
value* caml_memprof_young_trigger; value* caml_memprof_young_trigger;
@ -380,10 +380,10 @@ void caml_memprof_track_alloc_shr(value block)
heap. */ heap. */
static void shift_sample(uintnat n) static void shift_sample(uintnat n)
{ {
if (caml_memprof_young_trigger - caml_young_alloc_start > n) if (caml_memprof_young_trigger - Caml_state->young_alloc_start > n)
caml_memprof_young_trigger -= n; caml_memprof_young_trigger -= n;
else else
caml_memprof_young_trigger = caml_young_alloc_start; caml_memprof_young_trigger = Caml_state->young_alloc_start;
caml_update_young_limit(); caml_update_young_limit();
} }
@ -397,13 +397,13 @@ void caml_memprof_renew_minor_sample(void)
{ {
if (lambda == 0) /* No trigger in the current minor heap. */ if (lambda == 0) /* No trigger in the current minor heap. */
caml_memprof_young_trigger = caml_young_alloc_start; caml_memprof_young_trigger = Caml_state->young_alloc_start;
else { else {
uintnat geom = mt_generate_geom(); uintnat geom = mt_generate_geom();
if (caml_young_ptr - caml_young_alloc_start < geom) if(Caml_state->young_ptr - Caml_state->young_alloc_start < geom)
/* No trigger in the current minor heap. */ /* No trigger in the current minor heap. */
caml_memprof_young_trigger = caml_young_alloc_start; caml_memprof_young_trigger = Caml_state->young_alloc_start;
caml_memprof_young_trigger = caml_young_ptr - (geom - 1); caml_memprof_young_trigger = Caml_state->young_ptr - (geom - 1);
} }
caml_update_young_limit(); caml_update_young_limit();
@ -425,16 +425,18 @@ void caml_memprof_track_young(tag_t tag, uintnat wosize, int from_caml)
} }
/* If [lambda == 0], then [caml_memprof_young_trigger] should be /* If [lambda == 0], then [caml_memprof_young_trigger] should be
equal to [caml_young_alloc_start]. But this function is only equal to [Caml_state->young_alloc_start]. But this function is only
called with [caml_young_alloc_start <= caml_young_ptr < called with [Caml_state->young_alloc_start <= Caml_state->young_ptr <
caml_memprof_young_trigger], which is contradictory. */ caml_memprof_young_trigger], which is contradictory. */
CAMLassert(lambda > 0); CAMLassert(lambda > 0);
occurrences = occurrences =
mt_generate_binom(caml_memprof_young_trigger - 1 - caml_young_ptr) + 1; mt_generate_binom(caml_memprof_young_trigger - 1
- Caml_state->young_ptr) + 1;
if (!from_caml) { if (!from_caml) {
register_postponed_callback(Val_hp(caml_young_ptr), occurrences, Minor); register_postponed_callback(Val_hp(Caml_state->young_ptr), occurrences,
Minor);
caml_memprof_renew_minor_sample(); caml_memprof_renew_minor_sample();
CAMLreturn0; CAMLreturn0;
} }
@ -448,7 +450,7 @@ void caml_memprof_track_young(tag_t tag, uintnat wosize, int from_caml)
/* Restore the minor heap in a valid state for calling the callback. /* Restore the minor heap in a valid state for calling the callback.
We should not call the GC before these two instructions. */ We should not call the GC before these two instructions. */
caml_young_ptr += whsize; Caml_state->young_ptr += whsize;
caml_memprof_renew_minor_sample(); caml_memprof_renew_minor_sample();
/* Empty the queue to make sure callbacks are called in the right /* Empty the queue to make sure callbacks are called in the right
@ -460,14 +462,14 @@ void caml_memprof_track_young(tag_t tag, uintnat wosize, int from_caml)
/* We can now restore the minor heap in the state needed by /* We can now restore the minor heap in the state needed by
[Alloc_small_aux]. */ [Alloc_small_aux]. */
if (caml_young_ptr - whsize < caml_young_trigger) { if (Caml_state->young_ptr - whsize < Caml_state->young_trigger) {
CAML_INSTR_INT ("force_minor/memprof@", 1); CAML_INSTR_INT ("force_minor/memprof@", 1);
caml_gc_dispatch(); caml_gc_dispatch();
} }
/* Re-allocate the block in the minor heap. We should not call the /* Re-allocate the block in the minor heap. We should not call the
GC after this. */ GC after this. */
caml_young_ptr -= whsize; Caml_state->young_ptr -= whsize;
/* Make sure this block is not going to be sampled again. */ /* Make sure this block is not going to be sampled again. */
shift_sample(whsize); shift_sample(whsize);
@ -480,7 +482,7 @@ void caml_memprof_track_young(tag_t tag, uintnat wosize, int from_caml)
the block. It only checks that the block is young. the block. It only checks that the block is young.
- The allocation and initialization happens right after returning - The allocation and initialization happens right after returning
from [caml_memprof_track_young]. */ from [caml_memprof_track_young]. */
caml_ephemeron_set_key(Field(ephe, 0), 0, Val_hp(caml_young_ptr)); caml_ephemeron_set_key(Field(ephe, 0), 0, Val_hp(Caml_state->young_ptr));
} }
/* /!\ Since the heap is in an invalid state before initialization, /* /!\ Since the heap is in an invalid state before initialization,
View File
@ -191,7 +191,7 @@ CAMLprim value caml_realloc_global(value size)
CAMLprim value caml_get_current_environment(value unit) CAMLprim value caml_get_current_environment(value unit)
{ {
return *caml_extern_sp; return *Caml_state->extern_sp;
} }
CAMLprim value caml_invoke_traced_function(value codeptr, value env, value arg) CAMLprim value caml_invoke_traced_function(value codeptr, value env, value arg)
@ -222,9 +222,9 @@ CAMLprim value caml_invoke_traced_function(value codeptr, value env, value arg)
value * osp, * nsp; value * osp, * nsp;
int i; int i;
osp = caml_extern_sp; osp = Caml_state->extern_sp;
caml_extern_sp -= 4; Caml_state->extern_sp -= 4;
nsp = caml_extern_sp; nsp = Caml_state->extern_sp;
for (i = 0; i < 6; i++) nsp[i] = osp[i]; for (i = 0; i < 6; i++) nsp[i] = osp[i];
nsp[6] = codeptr; nsp[6] = codeptr;
nsp[7] = env; nsp[7] = env;
@ -273,13 +273,6 @@ value caml_static_release_bytecode(value prog, value len)
return Val_unit; /* not reached */ return Val_unit; /* not reached */
} }
value * caml_stack_low;
value * caml_stack_high;
value * caml_stack_threshold;
value * caml_extern_sp;
value * caml_trapsp;
int caml_callback_depth;
void (* volatile caml_async_action_hook)(void); void (* volatile caml_async_action_hook)(void);
struct longjmp_buffer * caml_external_raise;
#endif #endif
View File
@ -32,54 +32,53 @@
#include "caml/weak.h" #include "caml/weak.h"
/* Pointers into the minor heap. /* Pointers into the minor heap.
[caml_young_base] [Caml_state->young_base]
The [malloc] block that contains the heap. The [malloc] block that contains the heap.
[caml_young_start] ... [caml_young_end] [Caml_state->young_start] ... [Caml_state->young_end]
The whole range of the minor heap: all young blocks are inside The whole range of the minor heap: all young blocks are inside
this interval. this interval.
[caml_young_alloc_start]...[caml_young_alloc_end] [Caml_state->young_alloc_start]...[Caml_state->young_alloc_end]
The allocation arena: newly-allocated blocks are carved from The allocation arena: newly-allocated blocks are carved from
this interval, starting at [caml_young_alloc_end]. this interval, starting at [Caml_state->young_alloc_end].
[caml_young_alloc_mid] is the mid-point of this interval. [Caml_state->young_alloc_mid] is the mid-point of this interval.
[caml_young_ptr], [caml_young_trigger], [caml_young_limit] [Caml_state->young_ptr], [Caml_state->young_trigger],
[Caml_state->young_limit]
These pointers are all inside the allocation arena. These pointers are all inside the allocation arena.
- [caml_young_ptr] is where the next allocation will take place. - [Caml_state->young_ptr] is where the next allocation will take place.
- [caml_young_trigger] is how far we can allocate before triggering - [Caml_state->young_trigger] is how far we can allocate before
[caml_gc_dispatch]. Currently, it is either [caml_young_alloc_start] triggering [caml_gc_dispatch]. Currently, it is either
or the mid-point of the allocation arena. [Caml_state->young_alloc_start] or the mid-point of the allocation
- [caml_young_limit] is the pointer that is compared to arena.
[caml_young_ptr] for allocation. It is either: - [Caml_state->young_limit] is the pointer that is compared to
+ [caml_young_alloc_end] if a signal is pending and we are in [Caml_state->young_ptr] for allocation. It is either:
native code, + [Caml_state->young_alloc_end] if a signal is pending and we are
in native code,
+ [caml_memprof_young_trigger] if a memprof sample is planned, + [caml_memprof_young_trigger] if a memprof sample is planned,
+ or [caml_young_trigger]. + or [Caml_state->young_trigger].
*/ */
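A compact restatement of the layout above is a set of orderings between these fields. The following standalone sketch writes them as C assertions; the types are stand-ins and the exact set of invariants is inferred from the comment rather than taken from the runtime:

    #include <assert.h>

    typedef long sketch_value;   /* stand-in for the runtime's [value] */

    struct minor_heap_sketch {
      sketch_value *young_start, *young_end;
      sketch_value *young_alloc_start, *young_alloc_mid, *young_alloc_end;
      sketch_value *young_ptr, *young_trigger, *young_limit;
    };

    static void check_minor_heap_layout(const struct minor_heap_sketch *s)
    {
      /* The allocation arena sits inside the whole minor heap range. */
      assert(s->young_start <= s->young_alloc_start);
      assert(s->young_alloc_start <= s->young_alloc_mid);
      assert(s->young_alloc_mid <= s->young_alloc_end);
      assert(s->young_alloc_end <= s->young_end);
      /* Allocation moves young_ptr downward from young_alloc_end. */
      assert(s->young_alloc_start <= s->young_ptr);
      assert(s->young_ptr <= s->young_alloc_end);
      /* The trigger is either the start or the mid-point of the arena. */
      assert(s->young_trigger == s->young_alloc_start
             || s->young_trigger == s->young_alloc_mid);
      /* young_limit is what the allocation fast path compares against. */
      assert(s->young_alloc_start <= s->young_limit);
      assert(s->young_limit <= s->young_alloc_end);
    }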
struct generic_table CAML_TABLE_STRUCT(char); struct generic_table CAML_TABLE_STRUCT(char);
asize_t caml_minor_heap_wsz; void caml_alloc_minor_tables ()
static void *caml_young_base = NULL; {
CAMLexport value *caml_young_start = NULL, *caml_young_end = NULL; Caml_state->ref_table =
CAMLexport value *caml_young_alloc_start = NULL, caml_stat_alloc_noexc(sizeof(struct caml_ref_table));
*caml_young_alloc_mid = NULL, if (Caml_state->ref_table == NULL)
*caml_young_alloc_end = NULL; caml_fatal_error ("cannot initialize minor heap");
CAMLexport value *caml_young_ptr = NULL, *caml_young_limit = NULL; memset(Caml_state->ref_table, 0, sizeof(struct caml_ref_table));
CAMLexport value *caml_young_trigger = NULL;
CAMLexport struct caml_ref_table Caml_state->ephe_ref_table =
caml_ref_table = { NULL, NULL, NULL, NULL, NULL, 0, 0}; caml_stat_alloc_noexc(sizeof(struct caml_ephe_ref_table));
if (Caml_state->ephe_ref_table == NULL)
caml_fatal_error ("cannot initialize minor heap");
memset(Caml_state->ephe_ref_table, 0, sizeof(struct caml_ephe_ref_table));
CAMLexport struct caml_ephe_ref_table Caml_state->custom_table =
caml_ephe_ref_table = { NULL, NULL, NULL, NULL, NULL, 0, 0}; caml_stat_alloc_noexc(sizeof(struct caml_custom_table));
if (Caml_state->custom_table == NULL)
CAMLexport struct caml_custom_table caml_fatal_error ("cannot initialize minor heap");
caml_custom_table = { NULL, NULL, NULL, NULL, NULL, 0, 0}; memset(Caml_state->custom_table, 0, sizeof(struct caml_custom_table));
/* Table of custom blocks in the minor heap that contain finalizers }
or GC speed parameters. */
int caml_in_minor_collection = 0;
double caml_extra_heap_resources_minor = 0;
/* [sz] and [rsv] are numbers of entries */ /* [sz] and [rsv] are numbers of entries */
static void alloc_generic_table (struct generic_table *tbl, asize_t sz, static void alloc_generic_table (struct generic_table *tbl, asize_t sz,
@ -141,38 +140,40 @@ void caml_set_minor_heap_size (asize_t bsz)
CAMLassert (bsz >= Bsize_wsize(Minor_heap_min)); CAMLassert (bsz >= Bsize_wsize(Minor_heap_min));
CAMLassert (bsz <= Bsize_wsize(Minor_heap_max)); CAMLassert (bsz <= Bsize_wsize(Minor_heap_max));
CAMLassert (bsz % sizeof (value) == 0); CAMLassert (bsz % sizeof (value) == 0);
if (caml_young_ptr != caml_young_alloc_end){ if (Caml_state->young_ptr != Caml_state->young_alloc_end){
CAML_INSTR_INT ("force_minor/set_minor_heap_size@", 1); CAML_INSTR_INT ("force_minor/set_minor_heap_size@", 1);
caml_requested_minor_gc = 0; Caml_state->requested_minor_gc = 0;
caml_young_trigger = caml_young_alloc_mid; Caml_state->young_trigger = Caml_state->young_alloc_mid;
caml_update_young_limit(); caml_update_young_limit();
caml_empty_minor_heap (); caml_empty_minor_heap ();
} }
CAMLassert (caml_young_ptr == caml_young_alloc_end); CAMLassert (Caml_state->young_ptr == Caml_state->young_alloc_end);
new_heap = caml_stat_alloc_aligned_noexc(bsz, 0, &new_heap_base); new_heap = caml_stat_alloc_aligned_noexc(bsz, 0, &new_heap_base);
if (new_heap == NULL) caml_raise_out_of_memory(); if (new_heap == NULL) caml_raise_out_of_memory();
if (caml_page_table_add(In_young, new_heap, new_heap + bsz) != 0) if (caml_page_table_add(In_young, new_heap, new_heap + bsz) != 0)
caml_raise_out_of_memory(); caml_raise_out_of_memory();
if (caml_young_start != NULL){ if (Caml_state->young_start != NULL){
caml_page_table_remove(In_young, caml_young_start, caml_young_end); caml_page_table_remove(In_young, Caml_state->young_start,
caml_stat_free (caml_young_base); Caml_state->young_end);
caml_stat_free (Caml_state->young_base);
} }
caml_young_base = new_heap_base; Caml_state->young_base = new_heap_base;
caml_young_start = (value *) new_heap; Caml_state->young_start = (value *) new_heap;
caml_young_end = (value *) (new_heap + bsz); Caml_state->young_end = (value *) (new_heap + bsz);
caml_young_alloc_start = caml_young_start; Caml_state->young_alloc_start = Caml_state->young_start;
caml_young_alloc_mid = caml_young_alloc_start + Wsize_bsize (bsz) / 2; Caml_state->young_alloc_mid =
caml_young_alloc_end = caml_young_end; Caml_state->young_alloc_start + Wsize_bsize (bsz) / 2;
caml_young_trigger = caml_young_alloc_start; Caml_state->young_alloc_end = Caml_state->young_end;
Caml_state->young_trigger = Caml_state->young_alloc_start;
caml_update_young_limit(); caml_update_young_limit();
caml_young_ptr = caml_young_alloc_end; Caml_state->young_ptr = Caml_state->young_alloc_end;
caml_minor_heap_wsz = Wsize_bsize (bsz); Caml_state->minor_heap_wsz = Wsize_bsize (bsz);
caml_memprof_renew_minor_sample(); caml_memprof_renew_minor_sample();
reset_table ((struct generic_table *) &caml_ref_table); reset_table ((struct generic_table *) Caml_state->ref_table);
reset_table ((struct generic_table *) &caml_ephe_ref_table); reset_table ((struct generic_table *) Caml_state->ephe_ref_table);
reset_table ((struct generic_table *) &caml_custom_table); reset_table ((struct generic_table *) Caml_state->custom_table);
} }
static value oldify_todo_list = 0; static value oldify_todo_list = 0;
@ -189,7 +190,7 @@ void caml_oldify_one (value v, value *p)
tail_call: tail_call:
if (Is_block (v) && Is_young (v)){ if (Is_block (v) && Is_young (v)){
CAMLassert ((value *) Hp_val (v) >= caml_young_ptr); CAMLassert ((value *) Hp_val (v) >= Caml_state->young_ptr);
hd = Hd_val (v); hd = Hd_val (v);
if (hd == 0){ /* If already forwarded */ if (hd == 0){ /* If already forwarded */
*p = Field (v, 0); /* then forward pointer is first field. */ *p = Field (v, 0); /* then forward pointer is first field. */
@ -315,8 +316,8 @@ void caml_oldify_mopup (void)
/* Oldify the data in the minor heap of alive ephemeron /* Oldify the data in the minor heap of alive ephemeron
During minor collection keys outside the minor heap are considered alive */ During minor collection keys outside the minor heap are considered alive */
for (re = caml_ephe_ref_table.base; for (re = Caml_state->ephe_ref_table->base;
re < caml_ephe_ref_table.ptr; re++){ re < Caml_state->ephe_ref_table->ptr; re++){
/* look only at ephemeron with data in the minor heap */ /* look only at ephemeron with data in the minor heap */
if (re->offset == 1){ if (re->offset == 1){
value *data = &Field(re->ephe,1); value *data = &Field(re->ephe,1);
@ -346,23 +347,24 @@ void caml_empty_minor_heap (void)
uintnat prev_alloc_words; uintnat prev_alloc_words;
struct caml_ephe_ref_elt *re; struct caml_ephe_ref_elt *re;
if (caml_young_ptr != caml_young_alloc_end){ if (Caml_state->young_ptr != Caml_state->young_alloc_end){
if (caml_minor_gc_begin_hook != NULL) (*caml_minor_gc_begin_hook) (); if (caml_minor_gc_begin_hook != NULL) (*caml_minor_gc_begin_hook) ();
CAML_INSTR_SETUP (tmr, "minor"); CAML_INSTR_SETUP (tmr, "minor");
prev_alloc_words = caml_allocated_words; prev_alloc_words = caml_allocated_words;
caml_in_minor_collection = 1; Caml_state->in_minor_collection = 1;
caml_gc_message (0x02, "<"); caml_gc_message (0x02, "<");
caml_oldify_local_roots(); caml_oldify_local_roots();
CAML_INSTR_TIME (tmr, "minor/local_roots"); CAML_INSTR_TIME (tmr, "minor/local_roots");
for (r = caml_ref_table.base; r < caml_ref_table.ptr; r++){ for (r = Caml_state->ref_table->base;
r < Caml_state->ref_table->ptr; r++) {
caml_oldify_one (**r, *r); caml_oldify_one (**r, *r);
} }
CAML_INSTR_TIME (tmr, "minor/ref_table"); CAML_INSTR_TIME (tmr, "minor/ref_table");
caml_oldify_mopup (); caml_oldify_mopup ();
CAML_INSTR_TIME (tmr, "minor/copy"); CAML_INSTR_TIME (tmr, "minor/copy");
/* Update the ephemerons */ /* Update the ephemerons */
for (re = caml_ephe_ref_table.base; for (re = Caml_state->ephe_ref_table->base;
re < caml_ephe_ref_table.ptr; re++){ re < Caml_state->ephe_ref_table->ptr; re++){
if(re->offset < Wosize_val(re->ephe)){ if(re->offset < Wosize_val(re->ephe)){
/* If it is not the case, the ephemeron has been truncated */ /* If it is not the case, the ephemeron has been truncated */
value *key = &Field(re->ephe,re->offset); value *key = &Field(re->ephe,re->offset);
@ -380,7 +382,8 @@ void caml_empty_minor_heap (void)
/* Update the OCaml finalise_last values */ /* Update the OCaml finalise_last values */
caml_final_update_minor_roots(); caml_final_update_minor_roots();
/* Run custom block finalisation of dead minor values */ /* Run custom block finalisation of dead minor values */
for (elt = caml_custom_table.base; elt < caml_custom_table.ptr; elt++){ for (elt = Caml_state->custom_table->base;
elt < Caml_state->custom_table->ptr; elt++){
value v = elt->block; value v = elt->block;
if (Hd_val (v) == 0){ if (Hd_val (v) == 0){
/* Block was copied to the major heap: adjust GC speed numbers. */ /* Block was copied to the major heap: adjust GC speed numbers. */
@ -392,21 +395,23 @@ void caml_empty_minor_heap (void)
} }
} }
CAML_INSTR_TIME (tmr, "minor/update_weak"); CAML_INSTR_TIME (tmr, "minor/update_weak");
caml_stat_minor_words += caml_young_alloc_end - caml_young_ptr; Caml_state->stat_minor_words +=
caml_gc_clock += (double) (caml_young_alloc_end - caml_young_ptr) Caml_state->young_alloc_end - Caml_state->young_ptr;
/ caml_minor_heap_wsz; caml_gc_clock +=
caml_young_ptr = caml_young_alloc_end; (double) (Caml_state->young_alloc_end - Caml_state->young_ptr)
clear_table ((struct generic_table *) &caml_ref_table); / Caml_state->minor_heap_wsz;
clear_table ((struct generic_table *) &caml_ephe_ref_table); Caml_state->young_ptr = Caml_state->young_alloc_end;
clear_table ((struct generic_table *) &caml_custom_table); clear_table ((struct generic_table *) Caml_state->ref_table);
caml_extra_heap_resources_minor = 0; clear_table ((struct generic_table *) Caml_state->ephe_ref_table);
clear_table ((struct generic_table *) Caml_state->custom_table);
Caml_state->extra_heap_resources_minor = 0;
caml_gc_message (0x02, ">"); caml_gc_message (0x02, ">");
caml_in_minor_collection = 0; Caml_state->in_minor_collection = 0;
caml_final_empty_young (); caml_final_empty_young ();
CAML_INSTR_TIME (tmr, "minor/finalized"); CAML_INSTR_TIME (tmr, "minor/finalized");
caml_stat_promoted_words += caml_allocated_words - prev_alloc_words; Caml_state->stat_promoted_words += caml_allocated_words - prev_alloc_words;
CAML_INSTR_INT ("minor/promoted#", caml_allocated_words - prev_alloc_words); CAML_INSTR_INT ("minor/promoted#", caml_allocated_words - prev_alloc_words);
++ caml_stat_minor_collections; ++ Caml_state->stat_minor_collections;
caml_memprof_renew_minor_sample(); caml_memprof_renew_minor_sample();
if (caml_minor_gc_end_hook != NULL) (*caml_minor_gc_end_hook) (); if (caml_minor_gc_end_hook != NULL) (*caml_minor_gc_end_hook) ();
}else{ }else{
@ -416,7 +421,8 @@ void caml_empty_minor_heap (void)
#ifdef DEBUG #ifdef DEBUG
{ {
value *p; value *p;
for (p = caml_young_alloc_start; p < caml_young_alloc_end; ++p){ for (p = Caml_state->young_alloc_start; p < Caml_state->young_alloc_end;
++p) {
*p = Debug_free_minor; *p = Debug_free_minor;
} }
} }
@ -434,7 +440,7 @@ extern uintnat caml_instr_alloc_jump;
*/ */
CAMLexport void caml_gc_dispatch (void) CAMLexport void caml_gc_dispatch (void)
{ {
value *trigger = caml_young_trigger; /* save old value of trigger */ value *trigger = Caml_state->young_trigger; /* save old value of trigger */
#ifdef CAML_INSTR #ifdef CAML_INSTR
CAML_INSTR_SETUP(tmr, "dispatch"); CAML_INSTR_SETUP(tmr, "dispatch");
CAML_INSTR_TIME (tmr, "overhead"); CAML_INSTR_TIME (tmr, "overhead");
@ -442,48 +448,50 @@ CAMLexport void caml_gc_dispatch (void)
caml_instr_alloc_jump = 0; caml_instr_alloc_jump = 0;
#endif #endif
if (trigger == caml_young_alloc_start || caml_requested_minor_gc){ if (trigger == Caml_state->young_alloc_start
|| Caml_state->requested_minor_gc) {
/* The minor heap is full, we must do a minor collection. */ /* The minor heap is full, we must do a minor collection. */
/* reset the pointers first because the end hooks might allocate */ /* reset the pointers first because the end hooks might allocate */
caml_requested_minor_gc = 0; Caml_state->requested_minor_gc = 0;
caml_young_trigger = caml_young_alloc_mid; Caml_state->young_trigger = Caml_state->young_alloc_mid;
caml_update_young_limit(); caml_update_young_limit();
caml_empty_minor_heap (); caml_empty_minor_heap ();
/* The minor heap is empty, we can start a major collection. */ /* The minor heap is empty, we can start a major collection. */
if (caml_gc_phase == Phase_idle) caml_major_collection_slice (-1); if (caml_gc_phase == Phase_idle) caml_major_collection_slice (-1);
CAML_INSTR_TIME (tmr, "dispatch/minor"); CAML_INSTR_TIME (tmr, "dispatch/minor");
} }
if (trigger != caml_young_alloc_start || caml_requested_major_slice){ if (trigger != Caml_state->young_alloc_start
|| Caml_state->requested_major_slice) {
/* The minor heap is half-full, do a major GC slice. */ /* The minor heap is half-full, do a major GC slice. */
caml_requested_major_slice = 0; Caml_state->requested_major_slice = 0;
caml_young_trigger = caml_young_alloc_start; Caml_state->young_trigger = Caml_state->young_alloc_start;
caml_update_young_limit(); caml_update_young_limit();
caml_major_collection_slice (-1); caml_major_collection_slice (-1);
CAML_INSTR_TIME (tmr, "dispatch/major"); CAML_INSTR_TIME (tmr, "dispatch/major");
} }
} }
/* Called by [Alloc_small] when [caml_young_ptr] reaches [caml_young_limit]. /* Called by [Alloc_small] when [Caml_state->young_ptr] reaches
We have to either call memprof or the gc. */ [caml_young_limit]. We have to either call memprof or the gc. */
void caml_alloc_small_dispatch (tag_t tag, intnat wosize, int flags) void caml_alloc_small_dispatch (tag_t tag, intnat wosize, int flags)
{ {
/* Async callbacks may fill the minor heap again, so we need a while /* Async callbacks may fill the minor heap again, so we need a while
loop here. */ loop here. */
while (caml_young_ptr < caml_young_trigger){ while (Caml_state->young_ptr < Caml_state->young_trigger){
caml_young_ptr += Whsize_wosize (wosize); Caml_state->young_ptr += Whsize_wosize (wosize);
CAML_INSTR_INT ("force_minor/alloc_small@", 1); CAML_INSTR_INT ("force_minor/alloc_small@", 1);
caml_gc_dispatch (); caml_gc_dispatch ();
if(flags & CAML_FROM_CAML) caml_check_urgent_gc (Val_unit); if(flags & CAML_FROM_CAML) caml_check_urgent_gc (Val_unit);
caml_young_ptr -= Whsize_wosize (wosize); Caml_state->young_ptr -= Whsize_wosize (wosize);
} }
if(caml_young_ptr < caml_memprof_young_trigger){ if(Caml_state->young_ptr < caml_memprof_young_trigger){
if(flags & CAML_DO_TRACK) { if(flags & CAML_DO_TRACK) {
caml_memprof_track_young(tag, wosize, flags & CAML_FROM_CAML); caml_memprof_track_young(tag, wosize, flags & CAML_FROM_CAML);
/* Until the allocation actually takes place, the heap is in an invalid /* Until the allocation actually takes place, the heap is in an invalid
state (see comments in [caml_memprof_track_young]). Hence, very few state (see comments in [caml_memprof_track_young]). Hence, very few
heap operations are allowed before the actual allocation. heap operations are allowed before the actual allocation.
Moreover, [caml_young_ptr] should not be modified before the Moreover, [Caml_state->young_ptr] should not be modified before the
allocation, because its value has been used as the pointer to allocation, because its value has been used as the pointer to
the sampled block. the sampled block.
*/ */
@ -496,7 +504,7 @@ void caml_alloc_small_dispatch (tag_t tag, intnat wosize, int flags)
*/ */
CAMLexport void caml_minor_collection (void) CAMLexport void caml_minor_collection (void)
{ {
caml_requested_minor_gc = 1; Caml_state->requested_minor_gc = 1;
caml_gc_dispatch (); caml_gc_dispatch ();
} }
@ -509,7 +517,7 @@ static void realloc_generic_table
CAMLassert (tbl->limit >= tbl->threshold); CAMLassert (tbl->limit >= tbl->threshold);
if (tbl->base == NULL){ if (tbl->base == NULL){
alloc_generic_table (tbl, caml_minor_heap_wsz / 8, 256, alloc_generic_table (tbl, Caml_state->minor_heap_wsz / 8, 256,
element_size); element_size);
}else if (tbl->limit == tbl->threshold){ }else if (tbl->limit == tbl->threshold){
CAML_INSTR_INT (msg_intr_int, 1); CAML_INSTR_INT (msg_intr_int, 1);
@ -519,7 +527,7 @@ static void realloc_generic_table
}else{ }else{
asize_t sz; asize_t sz;
asize_t cur_ptr = tbl->ptr - tbl->base; asize_t cur_ptr = tbl->ptr - tbl->base;
CAMLassert (caml_requested_minor_gc); CAMLassert (Caml_state->requested_minor_gc);
tbl->size *= 2; tbl->size *= 2;
sz = (tbl->size + tbl->reserve) * element_size; sz = (tbl->size + tbl->reserve) * element_size;
View File
@ -17,6 +17,17 @@
.abiversion 2 .abiversion 2
#endif #endif
/* Special registers */
#define START_PRG_ARG 12
#define START_PRG_DOMAIN_STATE_PTR 7
#define C_CALL_FUN 25
#define C_CALL_TOC 26
#define C_CALL_RET_ADDR 27
#define DOMAIN_STATE_PTR 28
#define TRAP_PTR 29
#define ALLOC_LIMIT 30
#define ALLOC_PTR 31
#if defined(MODEL_ppc64) || defined(MODEL_ppc64le) #if defined(MODEL_ppc64) || defined(MODEL_ppc64le)
#define EITHER(a,b) b #define EITHER(a,b) b
#else #else
@ -121,19 +132,6 @@
#define Addrglobal(reg,glob) \ #define Addrglobal(reg,glob) \
addis reg, 0, glob@ha; \ addis reg, 0, glob@ha; \
addi reg, reg, glob@l addi reg, reg, glob@l
#define Loadglobal(reg,glob,tmp) \
addis tmp, 0, glob@ha; \
lg reg, glob@l(tmp)
#define Storeglobal(reg,glob,tmp) \
addis tmp, 0, glob@ha; \
stg reg, glob@l(tmp)
#define Loadglobal32(reg,glob,tmp) \
addis tmp, 0, glob@ha; \
lwz reg, glob@l(tmp)
#define Storeglobal32(reg,glob,tmp) \
addis tmp, 0, glob@ha; \
stw reg, glob@l(tmp)
#endif #endif
#if defined(MODEL_ppc64) || defined(MODEL_ppc64le) #if defined(MODEL_ppc64) || defined(MODEL_ppc64le)
@ -142,21 +140,17 @@
#define Addrglobal(reg,glob) \ #define Addrglobal(reg,glob) \
ld reg, LSYMB(glob)@toc(2) ld reg, LSYMB(glob)@toc(2)
#define Loadglobal(reg,glob,tmp) \
Addrglobal(tmp,glob); \
lg reg, 0(tmp)
#define Storeglobal(reg,glob,tmp) \
Addrglobal(tmp,glob); \
stg reg, 0(tmp)
#define Loadglobal32(reg,glob,tmp) \
Addrglobal(tmp,glob); \
lwz reg, 0(tmp)
#define Storeglobal32(reg,glob,tmp) \
Addrglobal(tmp,glob); \
stw reg, 0(tmp)
#endif #endif
.set domain_curr_field, 0
#define DOMAIN_STATE(c_type, name) \
.equ domain_field_caml_##name, domain_curr_field ; \
.set domain_curr_field, domain_curr_field + 1
#include "../runtime/caml/domain_state.tbl"
#undef DOMAIN_STATE
#define Caml_state(var) 8*domain_field_caml_##var(28)
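The block above reuses the same domain_state.tbl listing that generates the C structure: each DOMAIN_STATE entry gets a consecutive index, and Caml_state(var) becomes a fixed byte offset from r28, the register holding the domain state pointer. A hedged C illustration of that X-macro pattern, using a made-up three-entry table rather than the real domain_state.tbl:

    #include <stddef.h>
    #include <stdio.h>

    /* Illustration only: a made-up table standing in for caml/domain_state.tbl. */
    #define FAKE_DOMAIN_STATE_TBL(X) \
      X(void *, young_ptr)           \
      X(void *, young_limit)         \
      X(void *, exception_pointer)

    /* Pass 1: generate the structure, one word-sized field per entry. */
    #define GEN_FIELD(type, name) type name;
    struct fake_domain_state { FAKE_DOMAIN_STATE_TBL(GEN_FIELD) };
    #undef GEN_FIELD

    /* Pass 2: generate the field indices (the asm's .set domain_curr_field trick). */
    enum {
    #define GEN_INDEX(type, name) domain_field_##name,
      FAKE_DOMAIN_STATE_TBL(GEN_INDEX)
    #undef GEN_INDEX
    };

    int main(void)
    {
      /* On a 64-bit target, 8 * index matches the structure offset, which is
         what lets the asm do `lg reg, Caml_state(name)` with r28 as the base. */
      printf("young_limit: index %d, offsetof %zu\n",
             (int) domain_field_young_limit,
             offsetof(struct fake_domain_state, young_limit));
      return 0;
    }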
#if defined(MODEL_ppc64) #if defined(MODEL_ppc64)
.section ".opd","aw" .section ".opd","aw"
#else #else
@ -174,17 +168,17 @@ FUNCTION(caml_call_gc)
stwu 1, -STACKSIZE(1) stwu 1, -STACKSIZE(1)
/* Record return address into OCaml code */ /* Record return address into OCaml code */
mflr 0 mflr 0
Storeglobal(0, caml_last_return_address, 11) stg 0, Caml_state(last_return_address)
/* Record lowest stack address */ /* Record lowest stack address */
addi 0, 1, STACKSIZE addi 0, 1, STACKSIZE
Storeglobal(0, caml_bottom_of_stack, 11) stg 0, Caml_state(bottom_of_stack)
/* Record pointer to register array */ /* Record pointer to register array */
addi 0, 1, 8*32 + PARAM_SAVE_AREA + RESERVED_STACK addi 0, 1, 8*32 + PARAM_SAVE_AREA + RESERVED_STACK
Storeglobal(0, caml_gc_regs, 11) stg 0, Caml_state(gc_regs)
/* Save current allocation pointer for debugging purposes */ /* Save current allocation pointer for debugging purposes */
Storeglobal(31, caml_young_ptr, 11) stg ALLOC_PTR, Caml_state(young_ptr)
/* Save exception pointer (if e.g. a sighandler raises) */ /* Save exception pointer (if e.g. a sighandler raises) */
Storeglobal(29, caml_exception_pointer, 11) stg TRAP_PTR, Caml_state(exception_pointer)
/* Save all registers used by the code generator */ /* Save all registers used by the code generator */
addi 11, 1, 8*32 + PARAM_SAVE_AREA + RESERVED_STACK - WORD addi 11, 1, 8*32 + PARAM_SAVE_AREA + RESERVED_STACK - WORD
stgu 3, WORD(11) stgu 3, WORD(11)
@ -248,8 +242,8 @@ FUNCTION(caml_call_gc)
nop nop
#endif #endif
/* Reload new allocation pointer and allocation limit */ /* Reload new allocation pointer and allocation limit */
Loadglobal(31, caml_young_ptr, 11) lg ALLOC_PTR, Caml_state(young_ptr)
Loadglobal(30, caml_young_limit, 11) lg ALLOC_LIMIT, Caml_state(young_limit)
/* Restore all regs used by the code generator */ /* Restore all regs used by the code generator */
addi 11, 1, 8*32 + PARAM_SAVE_AREA + RESERVED_STACK - WORD addi 11, 1, 8*32 + PARAM_SAVE_AREA + RESERVED_STACK - WORD
lgu 3, WORD(11) lgu 3, WORD(11)
@ -308,7 +302,7 @@ FUNCTION(caml_call_gc)
lfdu 30, 8(11) lfdu 30, 8(11)
lfdu 31, 8(11) lfdu 31, 8(11)
/* Return to caller, restarting the allocation */ /* Return to caller, restarting the allocation */
Loadglobal(11, caml_last_return_address, 11) lg 11, Caml_state(last_return_address)
addi 11, 11, -16 /* Restart the allocation (4 instructions) */ addi 11, 11, -16 /* Restart the allocation (4 instructions) */
mtlr 11 mtlr 11
/* For PPC64: restore the TOC that the caller saved at the usual place */ /* For PPC64: restore the TOC that the caller saved at the usual place */
@ -326,39 +320,39 @@ ENDFUNCTION(caml_call_gc)
FUNCTION(caml_c_call) FUNCTION(caml_c_call)
.cfi_startproc .cfi_startproc
/* Save return address in a callee-save register */ /* Save return address in a callee-save register */
mflr 27 mflr C_CALL_RET_ADDR
.cfi_register 65, 27 .cfi_register 65, C_CALL_RET_ADDR
/* Record lowest stack address and return address */ /* Record lowest stack address and return address */
Storeglobal(1, caml_bottom_of_stack, 11) stg 1, Caml_state(bottom_of_stack)
Storeglobal(27, caml_last_return_address, 11) stg C_CALL_RET_ADDR, Caml_state(last_return_address)
/* Make the exception handler and alloc ptr available to the C code */ /* Make the exception handler and alloc ptr available to the C code */
Storeglobal(31, caml_young_ptr, 11) stg ALLOC_PTR, Caml_state(young_ptr)
Storeglobal(29, caml_exception_pointer, 11) stg TRAP_PTR, Caml_state(exception_pointer)
/* Call C function (address in r28) */ /* Call C function (address in C_CALL_FUN) */
#if defined(MODEL_ppc) #if defined(MODEL_ppc)
mtctr 28 mtctr C_CALL_FUN
bctrl bctrl
#elif defined(MODEL_ppc64) #elif defined(MODEL_ppc64)
ld 0, 0(28) ld 0, 0(C_CALL_FUN)
mr 26, 2 /* save current TOC in a callee-save register */ mr C_CALL_TOC, 2 /* save current TOC in a callee-save register */
mtctr 0 mtctr 0
ld 2, 8(28) ld 2, 8(C_CALL_FUN)
bctrl bctrl
mr 2, 26 /* restore current TOC */ mr 2, C_CALL_TOC /* restore current TOC */
#elif defined(MODEL_ppc64le) #elif defined(MODEL_ppc64le)
mtctr 28 mtctr C_CALL_FUN
mr 12, 28 mr 12, C_CALL_FUN
mr 26, 2 /* save current TOC in a callee-save register */ mr C_CALL_TOC, 2 /* save current TOC in a callee-save register */
bctrl bctrl
mr 2, 26 /* restore current TOC */ mr 2, C_CALL_TOC /* restore current TOC */
#else #else
#error "wrong MODEL" #error "wrong MODEL"
#endif #endif
/* Restore return address (in 27, preserved by the C function) */ /* Restore return address (in 27, preserved by the C function) */
mtlr 27 mtlr C_CALL_RET_ADDR
/* Reload allocation pointer and allocation limit*/ /* Reload allocation pointer and allocation limit*/
Loadglobal(31, caml_young_ptr, 11) lg ALLOC_PTR, Caml_state(young_ptr)
Loadglobal(30, caml_young_limit, 11) lg ALLOC_LIMIT, Caml_state(young_limit)
/* Return to caller */ /* Return to caller */
blr blr
.cfi_endproc .cfi_endproc
@ -367,67 +361,70 @@ ENDFUNCTION(caml_c_call)
/* Raise an exception from OCaml */ /* Raise an exception from OCaml */
FUNCTION(caml_raise_exn) FUNCTION(caml_raise_exn)
Loadglobal32(0, caml_backtrace_active, 11) lg 0, Caml_state(backtrace_active)
cmpwi 0, 0 cmpwi 0, 0
bne .L111 bne .L111
.L110: .L110:
/* Pop trap frame */ /* Pop trap frame */
lg 0, TRAP_HANDLER_OFFSET(29) lg 0, TRAP_HANDLER_OFFSET(TRAP_PTR)
mr 1, 29 mr 1, TRAP_PTR
mtctr 0 mtctr 0
lg 29, TRAP_PREVIOUS_OFFSET(1) lg TRAP_PTR, TRAP_PREVIOUS_OFFSET(1)
addi 1, 1, TRAP_SIZE addi 1, 1, TRAP_SIZE
/* Branch to handler */ /* Branch to handler */
bctr bctr
.L111: .L111:
mr 28, 3 /* preserve exn bucket in callee-save reg */ mr 27, 3 /* preserve exn bucket in callee-save reg */
/* arg1: exception bucket, already in r3 */ /* arg1: exception bucket, already in r3 */
mflr 4 /* arg2: PC of raise */ mflr 4 /* arg2: PC of raise */
mr 5, 1 /* arg3: SP of raise */ mr 5, 1 /* arg3: SP of raise */
mr 6, 29 /* arg4: SP of handler */ mr 6, TRAP_PTR /* arg4: SP of handler */
addi 1, 1, -(PARAM_SAVE_AREA + RESERVED_STACK) addi 1, 1, -(PARAM_SAVE_AREA + RESERVED_STACK)
/* reserve stack space for C call */ /* reserve stack space for C call */
bl caml_stash_backtrace bl caml_stash_backtrace
#if defined(MODEL_ppc64) || defined(MODEL_ppc64le) #if defined(MODEL_ppc64) || defined(MODEL_ppc64le)
nop nop
#endif #endif
mr 3, 28 /* restore exn bucket */ mr 3, 27 /* restore exn bucket */
b .L110 /* raise the exn */ b .L110 /* raise the exn */
ENDFUNCTION(caml_raise_exn) ENDFUNCTION(caml_raise_exn)
/* Raise an exception from C */ /* Raise an exception from C */
FUNCTION(caml_raise_exception) FUNCTION(caml_raise_exception)
Loadglobal32(0, caml_backtrace_active, 11) /* Load domain state pointer */
mr DOMAIN_STATE_PTR, 3
mr 3, 4
lg 0, Caml_state(backtrace_active)
cmpwi 0, 0 cmpwi 0, 0
bne .L121 bne .L121
.L120: .L120:
/* Reload OCaml global registers */ /* Reload OCaml global registers */
Loadglobal(1, caml_exception_pointer, 11) lg 1, Caml_state(exception_pointer)
Loadglobal(31, caml_young_ptr, 11) lg ALLOC_PTR, Caml_state(young_ptr)
Loadglobal(30, caml_young_limit, 11) lg ALLOC_LIMIT, Caml_state(young_limit)
/* Pop trap frame */ /* Pop trap frame */
lg 0, TRAP_HANDLER_OFFSET(1) lg 0, TRAP_HANDLER_OFFSET(1)
mtctr 0 mtctr 0
lg 29, TRAP_PREVIOUS_OFFSET(1) lg TRAP_PTR, TRAP_PREVIOUS_OFFSET(1)
addi 1, 1, TRAP_SIZE addi 1, 1, TRAP_SIZE
/* Branch to handler */ /* Branch to handler */
bctr bctr
.L121: .L121:
li 0, 0 li 0, 0
Storeglobal32(0, caml_backtrace_pos, 11) stg 0, Caml_state(backtrace_pos)
mr 28, 3 /* preserve exn bucket in callee-save reg */ mr 27, 3 /* preserve exn bucket in callee-save reg */
/* arg1: exception bucket, already in r3 */ /* arg1: exception bucket, already in r3 */
Loadglobal(4, caml_last_return_address, 11) /* arg2: PC of raise */ lg 4, Caml_state(last_return_address) /* arg2: PC of raise */
Loadglobal(5, caml_bottom_of_stack, 11) /* arg3: SP of raise */ lg 5, Caml_state(bottom_of_stack) /* arg3: SP of raise */
Loadglobal(6, caml_exception_pointer, 11) /* arg4: SP of handler */ lg 6, Caml_state(exception_pointer) /* arg4: SP of handler */
addi 1, 1, -(PARAM_SAVE_AREA + RESERVED_STACK) addi 1, 1, -(PARAM_SAVE_AREA + RESERVED_STACK)
/* reserve stack space for C call */ /* reserve stack space for C call */
bl caml_stash_backtrace bl caml_stash_backtrace
#if defined(MODEL_ppc64) || defined(MODEL_ppc64le) #if defined(MODEL_ppc64) || defined(MODEL_ppc64le)
nop nop
#endif #endif
mr 3, 28 /* restore exn bucket */ mr 3, 27 /* restore exn bucket */
b .L120 /* raise the exn */ b .L120 /* raise the exn */
ENDFUNCTION(caml_raise_exception) ENDFUNCTION(caml_raise_exception)
@ -437,7 +434,9 @@ FUNCTION(caml_start_program)
.cfi_startproc .cfi_startproc
#define STACKSIZE (WORD*18 + 8*18 + CALLBACK_LINK_SIZE + RESERVED_STACK) #define STACKSIZE (WORD*18 + 8*18 + CALLBACK_LINK_SIZE + RESERVED_STACK)
/* 18 callee-save GPR14...GPR31 + 18 callee-save FPR14...FPR31 */ /* 18 callee-save GPR14...GPR31 + 18 callee-save FPR14...FPR31 */
Addrglobal(12, caml_program) /* Domain state pointer is the first arg to caml_start_program. Move it */
mr START_PRG_DOMAIN_STATE_PTR, 3
Addrglobal(START_PRG_ARG, caml_program)
/* Code shared between caml_start_program and caml_callback */ /* Code shared between caml_start_program and caml_callback */
.L102: .L102:
/* Allocate and link stack frame */ /* Allocate and link stack frame */
@ -489,12 +488,14 @@ FUNCTION(caml_start_program)
stfdu 29, 8(11) stfdu 29, 8(11)
stfdu 30, 8(11) stfdu 30, 8(11)
stfdu 31, 8(11) stfdu 31, 8(11)
/* Load domain state pointer from argument */
mr DOMAIN_STATE_PTR, START_PRG_DOMAIN_STATE_PTR
/* Set up a callback link */ /* Set up a callback link */
Loadglobal(11, caml_bottom_of_stack, 11) lg 11, Caml_state(bottom_of_stack)
stg 11, CALLBACK_LINK_OFFSET(1) stg 11, CALLBACK_LINK_OFFSET(1)
Loadglobal(11, caml_last_return_address, 11) lg 11, Caml_state(last_return_address)
stg 11, (CALLBACK_LINK_OFFSET + WORD)(1) stg 11, (CALLBACK_LINK_OFFSET + WORD)(1)
Loadglobal(11, caml_gc_regs, 11) lg 11, Caml_state(gc_regs)
stg 11, (CALLBACK_LINK_OFFSET + 2 * WORD)(1) stg 11, (CALLBACK_LINK_OFFSET + 2 * WORD)(1)
/* Build an exception handler to catch exceptions escaping out of OCaml */ /* Build an exception handler to catch exceptions escaping out of OCaml */
bl .L103 bl .L103
@ -504,12 +505,12 @@ FUNCTION(caml_start_program)
.cfi_adjust_cfa_offset TRAP_SIZE .cfi_adjust_cfa_offset TRAP_SIZE
mflr 0 mflr 0
stg 0, TRAP_HANDLER_OFFSET(1) stg 0, TRAP_HANDLER_OFFSET(1)
Loadglobal(11, caml_exception_pointer, 11) lg 11, Caml_state(exception_pointer)
stg 11, TRAP_PREVIOUS_OFFSET(1) stg 11, TRAP_PREVIOUS_OFFSET(1)
mr 29, 1 mr TRAP_PTR, 1
/* Reload allocation pointers */ /* Reload allocation pointers */
Loadglobal(31, caml_young_ptr, 11) lg ALLOC_PTR, Caml_state(young_ptr)
Loadglobal(30, caml_young_limit, 11) lg ALLOC_LIMIT, Caml_state(young_limit)
/* Call the OCaml code (address in r12) */ /* Call the OCaml code (address in r12) */
#if defined(MODEL_ppc) #if defined(MODEL_ppc)
mtctr 12 mtctr 12
@ -531,19 +532,19 @@ FUNCTION(caml_start_program)
#endif #endif
/* Pop the trap frame, restoring caml_exception_pointer */ /* Pop the trap frame, restoring caml_exception_pointer */
lg 0, TRAP_PREVIOUS_OFFSET(1) lg 0, TRAP_PREVIOUS_OFFSET(1)
Storeglobal(0, caml_exception_pointer, 11) stg 0, Caml_state(exception_pointer)
addi 1, 1, TRAP_SIZE addi 1, 1, TRAP_SIZE
.cfi_adjust_cfa_offset -TRAP_SIZE .cfi_adjust_cfa_offset -TRAP_SIZE
/* Pop the callback link, restoring the global variables */ /* Pop the callback link, restoring the global variables */
.L106: .L106:
lg 0, CALLBACK_LINK_OFFSET(1) lg 0, CALLBACK_LINK_OFFSET(1)
Storeglobal(0, caml_bottom_of_stack, 11) stg 0, Caml_state(bottom_of_stack)
lg 0, (CALLBACK_LINK_OFFSET + WORD)(1) lg 0, (CALLBACK_LINK_OFFSET + WORD)(1)
Storeglobal(0, caml_last_return_address, 11) stg 0, Caml_state(last_return_address)
lg 0, (CALLBACK_LINK_OFFSET + 2 * WORD)(1) lg 0, (CALLBACK_LINK_OFFSET + 2 * WORD)(1)
Storeglobal(0, caml_gc_regs, 11) stg 0, Caml_state(gc_regs)
/* Update allocation pointer */ /* Update allocation pointer */
Storeglobal(31, caml_young_ptr, 11) stg ALLOC_PTR, Caml_state(young_ptr)
/* Restore callee-save registers */ /* Restore callee-save registers */
addi 11, 1, CALLBACK_LINK_SIZE + RESERVED_STACK - WORD addi 11, 1, CALLBACK_LINK_SIZE + RESERVED_STACK - WORD
lgu 14, WORD(11) lgu 14, WORD(11)
@ -596,7 +597,7 @@ FUNCTION(caml_start_program)
ld 2, (STACKSIZE + TOC_SAVE_PARENT)(1) ld 2, (STACKSIZE + TOC_SAVE_PARENT)(1)
#endif #endif
/* Update caml_exception_pointer */ /* Update caml_exception_pointer */
Storeglobal(29, caml_exception_pointer, 11) stg TRAP_PTR, Caml_state(exception_pointer)
/* Encode exception bucket as an exception result and return it */ /* Encode exception bucket as an exception result and return it */
ori 3, 3, 2 ori 3, 3, 2
b .L106 b .L106
@ -606,33 +607,39 @@ ENDFUNCTION(caml_start_program)
/* Callback from C to OCaml */ /* Callback from C to OCaml */
FUNCTION(caml_callback_exn) FUNCTION(caml_callback_asm)
/* Initial shuffling of arguments */ /* Initial shuffling of arguments */
mr 0, 3 /* Closure */ /* r3 = Caml_state, r4 = closure, 0(r5) = first arg */
mr 3, 4 /* Argument */ mr START_PRG_DOMAIN_STATE_PTR, 3
mr 4, 0 lg 3, 0(5) /* r3 = Argument */
lg 12, 0(4) /* Code pointer */ /* r4 = Closure */
lg START_PRG_ARG, 0(4) /* Code pointer */
b .L102 b .L102
ENDFUNCTION(caml_callback_exn) ENDFUNCTION(caml_callback_asm)
FUNCTION(caml_callback2_exn) FUNCTION(caml_callback2_asm)
mr 0, 3 /* Closure */ /* r3 = Caml_state, r4 = closure, 0(r5) = first arg,
mr 3, 4 /* First argument */ WORD(r5) = second arg */
mr 4, 5 /* Second argument */ mr START_PRG_DOMAIN_STATE_PTR, 3
mr 5, 0 mr 0, 4
Addrglobal(12, caml_apply2) lg 3, 0(5) /* r3 = First argument */
lg 4, WORD(5) /* r4 = Second argument */
mr 5, 0 /* r5 = Closure */
Addrglobal(START_PRG_ARG, caml_apply2)
b .L102 b .L102
ENDFUNCTION(caml_callback2_exn) ENDFUNCTION(caml_callback2_asm)
FUNCTION(caml_callback3_exn) FUNCTION(caml_callback3_asm)
mr 0, 3 /* Closure */ /* r3 = Caml_state, r4 = closure, 0(r5) = first arg, WORD(r5) = second arg,
mr 3, 4 /* First argument */ 2*WORD(r5) = third arg */
mr 4, 5 /* Second argument */ mr START_PRG_DOMAIN_STATE_PTR, 3
mr 5, 6 /* Third argument */ mr 6, 4 /* r6 = Closure */
mr 6, 0 lg 3, 0(5) /* r3 = First argument */
Addrglobal(12, caml_apply3) lg 4, WORD(5) /* r4 = Second argument */
lg 5, 2*WORD(5) /* r5 = Third argument */
Addrglobal(START_PRG_ARG, caml_apply3)
b .L102 b .L102
ENDFUNCTION(caml_callback3_exn) ENDFUNCTION(caml_callback3_asm)
#if defined(MODEL_ppc64) #if defined(MODEL_ppc64)
.section ".opd","aw" .section ".opd","aw"
@ -664,15 +671,7 @@ caml_system__frametable:
TOCENTRY(caml_apply2) TOCENTRY(caml_apply2)
TOCENTRY(caml_apply3) TOCENTRY(caml_apply3)
TOCENTRY(caml_backtrace_active)
TOCENTRY(caml_backtrace_pos)
TOCENTRY(caml_bottom_of_stack)
TOCENTRY(caml_exception_pointer)
TOCENTRY(caml_gc_regs)
TOCENTRY(caml_last_return_address)
TOCENTRY(caml_program) TOCENTRY(caml_program)
TOCENTRY(caml_young_limit)
TOCENTRY(caml_young_ptr)
#endif #endif
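A note on the underlying mechanism, which is only visible piecemeal in this diff: the former C globals now live in one per-domain record, and both the C runtime and the assembly files describe its layout through a single table, runtime/caml/domain_state.tbl. Below is a minimal, self-contained sketch of that X-macro scheme; the field list and the "value" stand-in are simplified for illustration and this is not the actual runtime header.

```c
/* Illustrative only: the real table lists every former global and is
   included both by the C headers (to build this struct) and by the
   assembly files (to compute field offsets). */
#define EXAMPLE_DOMAIN_STATE_TABLE \
  DOMAIN_STATE(value *, young_ptr)        \
  DOMAIN_STATE(value *, young_limit)      \
  DOMAIN_STATE(char *,  exception_pointer)

typedef long value;                       /* stand-in for the real 'value' */

#define DOMAIN_STATE(type, name) type name;
typedef struct caml_domain_state { EXAMPLE_DOMAIN_STATE_TABLE } caml_domain_state;
#undef DOMAIN_STATE

extern caml_domain_state *Caml_state;
/* C code now writes Caml_state->young_ptr where it used to write
   caml_young_ptr; the Caml_state(field) assembler macro seen in these .S
   files resolves to the corresponding byte offset from the register that
   holds the domain state pointer. */
```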

@ -118,18 +118,18 @@ static void default_fatal_uncaught_exception(value exn)
msg = caml_format_exception(exn); msg = caml_format_exception(exn);
/* Perform "at_exit" processing, ignoring all exceptions that may /* Perform "at_exit" processing, ignoring all exceptions that may
be triggered by this */ be triggered by this */
saved_backtrace_active = caml_backtrace_active; saved_backtrace_active = Caml_state->backtrace_active;
saved_backtrace_pos = caml_backtrace_pos; saved_backtrace_pos = Caml_state->backtrace_pos;
caml_backtrace_active = 0; Caml_state->backtrace_active = 0;
at_exit = caml_named_value("Pervasives.do_at_exit"); at_exit = caml_named_value("Pervasives.do_at_exit");
if (at_exit != NULL) caml_callback_exn(*at_exit, Val_unit); if (at_exit != NULL) caml_callback_exn(*at_exit, Val_unit);
caml_backtrace_active = saved_backtrace_active; Caml_state->backtrace_active = saved_backtrace_active;
caml_backtrace_pos = saved_backtrace_pos; Caml_state->backtrace_pos = saved_backtrace_pos;
/* Display the uncaught exception */ /* Display the uncaught exception */
fprintf(stderr, "Fatal error: exception %s\n", msg); fprintf(stderr, "Fatal error: exception %s\n", msg);
caml_stat_free(msg); caml_stat_free(msg);
/* Display the backtrace if available */ /* Display the backtrace if available */
if (caml_backtrace_active && !DEBUGGER_IN_USE) if (Caml_state->backtrace_active && !DEBUGGER_IN_USE)
caml_print_exception_backtrace(); caml_print_exception_backtrace();
} }

@ -27,8 +27,6 @@
#include "caml/roots.h" #include "caml/roots.h"
#include "caml/stacks.h" #include "caml/stacks.h"
CAMLexport struct caml__roots_block *caml_local_roots = NULL;
CAMLexport void (*caml_scan_roots_hook) (scanning_action f) = NULL; CAMLexport void (*caml_scan_roots_hook) (scanning_action f) = NULL;
/* FIXME should rename to [caml_oldify_minor_roots] and synchronise with /* FIXME should rename to [caml_oldify_minor_roots] and synchronise with
@ -42,11 +40,11 @@ void caml_oldify_local_roots (void)
intnat i, j; intnat i, j;
/* The stack */ /* The stack */
for (sp = caml_extern_sp; sp < caml_stack_high; sp++) { for (sp = Caml_state->extern_sp; sp < Caml_state->stack_high; sp++) {
caml_oldify_one (*sp, sp); caml_oldify_one (*sp, sp);
} }
/* Local C roots */ /* FIXME do the old-frame trick ? */ /* Local C roots */ /* FIXME do the old-frame trick ? */
for (lr = caml_local_roots; lr != NULL; lr = lr->next) { for (lr = Caml_state->local_roots; lr != NULL; lr = lr->next) {
for (i = 0; i < lr->ntables; i++){ for (i = 0; i < lr->ntables; i++){
for (j = 0; j < lr->nitems; j++){ for (j = 0; j < lr->nitems; j++){
sp = &(lr->tables[i][j]); sp = &(lr->tables[i][j]);
@ -85,7 +83,8 @@ void caml_do_roots (scanning_action f, int do_globals)
f(caml_global_data, &caml_global_data); f(caml_global_data, &caml_global_data);
CAML_INSTR_TIME (tmr, "major_roots/global"); CAML_INSTR_TIME (tmr, "major_roots/global");
/* The stack and the local C roots */ /* The stack and the local C roots */
caml_do_local_roots(f, caml_extern_sp, caml_stack_high, caml_local_roots); caml_do_local_roots(f, Caml_state->extern_sp, Caml_state->stack_high,
Caml_state->local_roots);
CAML_INSTR_TIME (tmr, "major_roots/local"); CAML_INSTR_TIME (tmr, "major_roots/local");
/* Global C roots */ /* Global C roots */
caml_scan_global_roots(f); caml_scan_global_roots(f);
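The loop above now walks Caml_state->local_roots, the chain of caml__roots_block records that the CAMLparam*/CAMLlocal* macros maintain for C primitives. For context, here is a short, standard example of a primitive whose locals land on that chain; the primitive name is made up, the macros are the usual public FFI ones.

```c
#include <caml/mlvalues.h>
#include <caml/memory.h>
#include <caml/alloc.h>

/* Hypothetical primitive: CAMLparam1/CAMLlocal1 push a caml__roots_block
   onto Caml_state->local_roots (the list scanned above); CAMLreturn pops
   it again before returning to OCaml. */
CAMLprim value example_prim(value v)
{
  CAMLparam1(v);
  CAMLlocal1(result);
  result = caml_copy_string("ok");   /* may trigger a GC; v stays rooted */
  CAMLreturn(result);
}
```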

@ -31,8 +31,6 @@
/* Roots registered from C functions */ /* Roots registered from C functions */
struct caml__roots_block *caml_local_roots = NULL;
void (*caml_scan_roots_hook) (scanning_action) = NULL; void (*caml_scan_roots_hook) (scanning_action) = NULL;
/* The hashtable of frame descriptors */ /* The hashtable of frame descriptors */
@ -220,10 +218,6 @@ void caml_unregister_frametable(intnat *table) {
/* Communication with [caml_start_program] and [caml_call_gc]. */ /* Communication with [caml_start_program] and [caml_call_gc]. */
char * caml_top_of_stack;
char * caml_bottom_of_stack = NULL; /* no stack initially */
uintnat caml_last_return_address = 1; /* not in OCaml code initially */
value * caml_gc_regs;
intnat caml_globals_inited = 0; intnat caml_globals_inited = 0;
static intnat caml_globals_scanned = 0; static intnat caml_globals_scanned = 0;
static link * caml_dyn_globals = NULL; static link * caml_dyn_globals = NULL;
@ -271,9 +265,9 @@ void caml_oldify_local_roots (void)
} }
/* The stack and local roots */ /* The stack and local roots */
sp = caml_bottom_of_stack; sp = Caml_state->bottom_of_stack;
retaddr = caml_last_return_address; retaddr = Caml_state->last_return_address;
regs = caml_gc_regs; regs = Caml_state->gc_regs;
if (sp != NULL) { if (sp != NULL) {
while (1) { while (1) {
/* Find the descriptor corresponding to the return address */ /* Find the descriptor corresponding to the return address */
@ -316,7 +310,7 @@ void caml_oldify_local_roots (void)
} }
} }
/* Local C roots */ /* Local C roots */
for (lr = caml_local_roots; lr != NULL; lr = lr->next) { for (lr = Caml_state->local_roots; lr != NULL; lr = lr->next) {
for (i = 0; i < lr->ntables; i++){ for (i = 0; i < lr->ntables; i++){
for (j = 0; j < lr->nitems; j++){ for (j = 0; j < lr->nitems; j++){
root = &(lr->tables[i][j]); root = &(lr->tables[i][j]);
@ -414,8 +408,9 @@ void caml_do_roots (scanning_action f, int do_globals)
} }
CAML_INSTR_TIME (tmr, "major_roots/dynamic_global"); CAML_INSTR_TIME (tmr, "major_roots/dynamic_global");
/* The stack and local roots */ /* The stack and local roots */
caml_do_local_roots(f, caml_bottom_of_stack, caml_last_return_address, caml_do_local_roots(f, Caml_state->bottom_of_stack,
caml_gc_regs, caml_local_roots); Caml_state->last_return_address, Caml_state->gc_regs,
Caml_state->local_roots);
CAML_INSTR_TIME (tmr, "major_roots/local"); CAML_INSTR_TIME (tmr, "major_roots/local");
/* Global C roots */ /* Global C roots */
caml_scan_global_roots(f); caml_scan_global_roots(f);
@ -499,7 +494,8 @@ uintnat (*caml_stack_usage_hook)(void) = NULL;
uintnat caml_stack_usage (void) uintnat caml_stack_usage (void)
{ {
uintnat sz; uintnat sz;
sz = (value *) caml_top_of_stack - (value *) caml_bottom_of_stack; sz = (value *) Caml_state->top_of_stack -
(value *) Caml_state->bottom_of_stack;
if (caml_stack_usage_hook != NULL) if (caml_stack_usage_hook != NULL)
sz += (*caml_stack_usage_hook)(); sz += (*caml_stack_usage_hook)();
return sz; return sz;

@ -19,30 +19,21 @@
#define Addrglobal(reg,glob) \ #define Addrglobal(reg,glob) \
lgrl reg, glob@GOTENT lgrl reg, glob@GOTENT
#define Loadglobal(reg,glob) \
lgrl %r1, glob@GOTENT; lg reg, 0(%r1)
#define Storeglobal(reg,glob) \
lgrl %r1, glob@GOTENT; stg reg, 0(%r1)
#define Loadglobal32(reg,glob) \
lgrl %r1, glob@GOTENT; lgf reg, 0(%r1)
#define Storeglobal32(reg,glob) \
lgrl %r1, glob@GOTENT; sty reg, 0(%r1)
#else #else
#define Addrglobal(reg,glob) \ #define Addrglobal(reg,glob) \
larl reg, glob larl reg, glob
#define Loadglobal(reg,glob) \
lgrl reg, glob
#define Storeglobal(reg,glob) \
stgrl reg, glob
#define Loadglobal32(reg,glob) \
lgfrl reg, glob
#define Storeglobal32(reg,glob) \
strl reg, glob
#endif #endif
.set domain_curr_field, 0
#define DOMAIN_STATE(c_type, name) \
.equ domain_field_caml_##name, domain_curr_field ; \
.set domain_curr_field, domain_curr_field + 1
#include "../runtime/caml/domain_state.tbl"
#undef DOMAIN_STATE
#define Caml_state(var) 8*domain_field_caml_##var(%r10)
.section ".text" .section ".text"
/* Invoke the garbage collector. */ /* Invoke the garbage collector. */
@ -57,17 +48,17 @@ caml_call_gc:
#define FRAMESIZE (16*8 + 16*8) #define FRAMESIZE (16*8 + 16*8)
lay %r15, -FRAMESIZE(%r15) lay %r15, -FRAMESIZE(%r15)
/* Record return address into OCaml code */ /* Record return address into OCaml code */
Storeglobal(%r14, caml_last_return_address) stg %r14, Caml_state(last_return_address)
/* Record lowest stack address */ /* Record lowest stack address */
lay %r0, FRAMESIZE(%r15) lay %r0, FRAMESIZE(%r15)
Storeglobal(%r0, caml_bottom_of_stack) stg %r0, Caml_state(bottom_of_stack)
/* Record pointer to register array */ /* Record pointer to register array */
lay %r0, (8*16)(%r15) lay %r0, (8*16)(%r15)
Storeglobal(%r0, caml_gc_regs) stg %r0, Caml_state(gc_regs)
/* Save current allocation pointer for debugging purposes */ /* Save current allocation pointer for debugging purposes */
Storeglobal(%r11, caml_young_ptr) stg %r11, Caml_state(young_ptr)
/* Save exception pointer (if e.g. a sighandler raises) */ /* Save exception pointer (if e.g. a sighandler raises) */
Storeglobal(%r13, caml_exception_pointer) stg %r13, Caml_state(exception_pointer)
/* Save all registers used by the code generator */ /* Save all registers used by the code generator */
stmg %r2,%r9, (8*16)(%r15) stmg %r2,%r9, (8*16)(%r15)
stg %r12, (8*16 + 8*8)(%r15) stg %r12, (8*16 + 8*8)(%r15)
@ -88,13 +79,12 @@ caml_call_gc:
std %f14, 112(%r15) std %f14, 112(%r15)
std %f15, 120(%r15) std %f15, 120(%r15)
/* Call the GC */ /* Call the GC */
lay %r15, -160(%r15) lay %r15, -160(%r15)
stg %r15, 0(%r15) stg %r15, 0(%r15)
brasl %r14, caml_garbage_collection@PLT brasl %r14, caml_garbage_collection@PLT
lay %r15, 160(%r15) lay %r15, 160(%r15)
/* Reload new allocation pointer and allocation limit */ /* Reload new allocation pointer */
Loadglobal(%r11, caml_young_ptr) lg %r11, Caml_state(young_ptr)
Loadglobal(%r10, caml_young_limit)
/* Restore all regs used by the code generator */ /* Restore all regs used by the code generator */
lmg %r2,%r9, (8*16)(%r15) lmg %r2,%r9, (8*16)(%r15)
lg %r12, (8*16 + 8*8)(%r15) lg %r12, (8*16 + 8*8)(%r15)
@ -115,34 +105,33 @@ caml_call_gc:
ld %f14, 112(%r15) ld %f14, 112(%r15)
ld %f15, 120(%r15) ld %f15, 120(%r15)
/* Return to caller */ /* Return to caller */
Loadglobal(%r1, caml_last_return_address) lg %r1, Caml_state(last_return_address)
/* Deallocate stack frame */ /* Deallocate stack frame */
lay %r15, FRAMESIZE(%r15) lay %r15, FRAMESIZE(%r15)
/* Return */ /* Return */
br %r1 br %r1
/* Call a C function from OCaml */ /* Call a C function from OCaml */
.globl caml_c_call .globl caml_c_call
.type caml_c_call, @function .type caml_c_call, @function
caml_c_call: caml_c_call:
Storeglobal(%r15, caml_bottom_of_stack) stg %r15, Caml_state(bottom_of_stack)
.L101: .L101:
/* Save return address */ /* Save return address */
ldgr %f15, %r14 ldgr %f15, %r14
/* Get ready to call C function (address in r7) */ /* Get ready to call C function (address in r7) */
/* Record lowest stack address and return address */ /* Record lowest stack address and return address */
Storeglobal(%r14, caml_last_return_address) stg %r14, Caml_state(last_return_address)
/* Make the exception handler and alloc ptr available to the C code */ /* Make the exception handler and alloc ptr available to the C code */
Storeglobal(%r11, caml_young_ptr) stg %r11, Caml_state(young_ptr)
Storeglobal(%r13, caml_exception_pointer) stg %r13, Caml_state(exception_pointer)
/* Call the function */ /* Call the function */
basr %r14, %r7 basr %r14, %r7
/* restore return address */ /* restore return address */
lgdr %r14,%f15 lgdr %r14,%f15
/* Reload allocation pointer and allocation limit*/ /* Reload allocation pointer */
Loadglobal(%r11, caml_young_ptr) lg %r11, Caml_state(young_ptr)
Loadglobal(%r10, caml_young_limit)
/* Return to caller */ /* Return to caller */
br %r14 br %r14
@ -150,24 +139,24 @@ caml_c_call:
.globl caml_raise_exn .globl caml_raise_exn
.type caml_raise_exn, @function .type caml_raise_exn, @function
caml_raise_exn: caml_raise_exn:
Loadglobal32(%r0, caml_backtrace_active) lg %r0, Caml_state(backtrace_active)
cgfi %r0, 0 cgfi %r0, 0
jne .L110 jne .L110
.L111: .L111:
/* Pop trap frame */ /* Pop trap frame */
lg %r1, 0(%r13) lg %r1, 0(%r13)
lgr %r15, %r13 lgr %r15, %r13
lg %r13, 8(13) lg %r13, 8(13)
agfi %r15, 16 agfi %r15, 16
/* Branch to handler */ /* Branch to handler */
br %r1 br %r1
.L110: .L110:
ldgr %f15, %r2 /* preserve exn bucket in callee-save reg */ ldgr %f15, %r2 /* preserve exn bucket in callee-save reg */
/* arg1: exception bucket, already in r3 */ /* arg1: exception bucket, already in r2 */
lgr %r3,%r14 /* arg2: PC of raise */ lgr %r3, %r14 /* arg2: PC of raise */
lgr %r4, %r15 /* arg3: SP of raise */ lgr %r4, %r15 /* arg3: SP of raise */
lgr %r5, %r13 /* arg4: SP of handler */ lgr %r5, %r13 /* arg4: SP of handler */
agfi %r15, -160 /* reserve stack space for C call */ agfi %r15, -160 /* reserve stack space for C call */
brasl %r14, caml_stash_backtrace@PLT brasl %r14, caml_stash_backtrace@PLT
agfi %r15, 160 agfi %r15, 160
lgdr %r2,%f15 /* restore exn bucket */ lgdr %r2,%f15 /* restore exn bucket */
@ -178,14 +167,15 @@ caml_raise_exn:
.globl caml_raise_exception .globl caml_raise_exception
.type caml_raise_exception, @function .type caml_raise_exception, @function
caml_raise_exception: caml_raise_exception:
Loadglobal32(%r0, caml_backtrace_active) lgr %r10, %r2 /* Load domain state pointer */
lgr %r2, %r3 /* Move exception bucket to arg1 register */
lg %r0, Caml_state(backtrace_active)
cgfi %r0, 0 cgfi %r0, 0
jne .L112 jne .L112
.L113: .L113:
/* Reload OCaml global registers */ /* Reload OCaml global registers */
Loadglobal(%r15, caml_exception_pointer) lg %r15, Caml_state(exception_pointer)
Loadglobal(%r11, caml_young_ptr) lg %r11, Caml_state(young_ptr)
Loadglobal(%r10, caml_young_limit)
/* Pop trap frame */ /* Pop trap frame */
lg %r1, 0(%r15) lg %r1, 0(%r15)
lg %r13, 8(%r15) lg %r13, 8(%r15)
@ -193,17 +183,17 @@ caml_raise_exception:
/* Branch to handler */ /* Branch to handler */
br %r1; br %r1;
.L112: .L112:
lgfi %r0, 0 lgfi %r0, 0
Storeglobal32(%r0, caml_backtrace_pos) stg %r0, Caml_state(backtrace_pos)
ldgr %f15,%r2 /* preserve exn bucket in callee-save reg */ ldgr %f15,%r2 /* preserve exn bucket in callee-save reg */
/* arg1: exception bucket, already in r2 */ /* arg1: exception bucket, already in r2 */
Loadglobal(%r3, caml_last_return_address) /* arg2: PC of raise */ lg %r3, Caml_state(last_return_address) /* arg2: PC of raise */
Loadglobal(%r4, caml_bottom_of_stack) /* arg3: SP of raise */ lg %r4, Caml_state(bottom_of_stack) /* arg3: SP of raise */
Loadglobal(%r5, caml_exception_pointer) /* arg4: SP of handler */ lg %r5, Caml_state(exception_pointer) /* arg4: SP of handler */
/* reserve stack space for C call */ /* reserve stack space for C call */
lay %r15, -160(%r15) lay %r15, -160(%r15)
brasl %r14, caml_stash_backtrace@PLT brasl %r14, caml_stash_backtrace@PLT
lay %r15, 160(%r15) lay %r15, 160(%r15)
lgdr %r2,%f15 /* restore exn bucket */ lgdr %r2,%f15 /* restore exn bucket */
j .L113 /* raise the exn */ j .L113 /* raise the exn */
@ -212,6 +202,8 @@ caml_raise_exception:
.globl caml_start_program .globl caml_start_program
.type caml_start_program, @function .type caml_start_program, @function
caml_start_program: caml_start_program:
/* Move Caml_state passed as first argument to r1 */
lgr %r1, %r2
Addrglobal(%r0, caml_program) Addrglobal(%r0, caml_program)
/* Code shared between caml_start_program and caml_callback */ /* Code shared between caml_start_program and caml_callback */
@ -231,13 +223,15 @@ caml_start_program:
std %f14, 120(%r15) std %f14, 120(%r15)
std %f15, 128(%r15) std %f15, 128(%r15)
/* Load Caml_state to r10 register */
lgr %r10, %r1
/* Set up a callback link */ /* Set up a callback link */
lay %r15, -32(%r15) lay %r15, -32(%r15)
Loadglobal(%r1, caml_bottom_of_stack) lg %r1, Caml_state(bottom_of_stack)
stg %r1, 0(%r15) stg %r1, 0(%r15)
Loadglobal(%r1, caml_last_return_address) lg %r1, Caml_state(last_return_address)
stg %r1, 8(%r15) stg %r1, 8(%r15)
Loadglobal(%r1, caml_gc_regs) lg %r1, Caml_state(gc_regs)
stg %r1, 16(%r15) stg %r1, 16(%r15)
/* Build an exception handler to catch exceptions escaping out of OCaml */ /* Build an exception handler to catch exceptions escaping out of OCaml */
brasl %r14, .L103 brasl %r14, .L103
@ -245,43 +239,42 @@ caml_start_program:
.L103: .L103:
lay %r15, -16(%r15) lay %r15, -16(%r15)
stg %r14, 0(%r15) stg %r14, 0(%r15)
Loadglobal(%r1, caml_exception_pointer) lg %r1, Caml_state(exception_pointer)
stg %r1, 8(%r15) stg %r1, 8(%r15)
lgr %r13, %r15 lgr %r13, %r15
/* Reload allocation pointers */ /* Reload allocation pointer */
Loadglobal(%r11, caml_young_ptr) lg %r11, Caml_state(young_ptr)
Loadglobal(%r10, caml_young_limit)
/* Call the OCaml code */ /* Call the OCaml code */
lgr %r1,%r0 lgr %r1,%r0
basr %r14, %r1 basr %r14, %r1
.L105: .L105:
/* Pop the trap frame, restoring caml_exception_pointer */ /* Pop the trap frame, restoring caml_exception_pointer */
lg %r0, 8(%r15) lg %r0, 8(%r15)
Storeglobal(%r0, caml_exception_pointer) stg %r0, Caml_state(exception_pointer)
la %r15, 16(%r15) la %r15, 16(%r15)
/* Pop the callback link, restoring the global variables */ /* Pop the callback link, restoring the global variables */
.L106: .L106:
lg %r5, 0(%r15) lg %r5, 0(%r15)
lg %r6, 8(%r15) lg %r6, 8(%r15)
lg %r0, 16(%r15) lg %r0, 16(%r15)
Storeglobal(%r5, caml_bottom_of_stack) stg %r5, Caml_state(bottom_of_stack)
Storeglobal(%r6, caml_last_return_address) stg %r6, Caml_state(last_return_address)
Storeglobal(%r0, caml_gc_regs) stg %r0, Caml_state(gc_regs)
la %r15, 32(%r15) la %r15, 32(%r15)
/* Update allocation pointer */ /* Update allocation pointer */
Storeglobal(%r11, caml_young_ptr) stg %r11, Caml_state(young_ptr)
/* Restore registers */ /* Restore registers */
lmg %r6,%r14, 0(%r15) lmg %r6,%r14, 0(%r15)
ld %f8, 72(%r15) ld %f8, 72(%r15)
ld %f9, 80(%r15) ld %f9, 80(%r15)
ld %f10, 88(%r15) ld %f10, 88(%r15)
ld %f11, 96(%r15) ld %f11, 96(%r15)
ld %f12, 104(%r15) ld %f12, 104(%r15)
ld %f13, 112(%r15) ld %f13, 112(%r15)
ld %f14, 120(%r15) ld %f14, 120(%r15)
ld %f15, 128(%r15) ld %f15, 128(%r15)
/* Return */ /* Return */
lay %r15, 144(%r15) lay %r15, 144(%r15)
@ -290,42 +283,49 @@ caml_start_program:
/* The trap handler: */ /* The trap handler: */
.L104: .L104:
/* Update caml_exception_pointer */ /* Update caml_exception_pointer */
Storeglobal(%r13, caml_exception_pointer) stg %r13, Caml_state(exception_pointer)
/* Encode exception bucket as an exception result and return it */ /* Encode exception bucket as an exception result and return it */
oill %r2, 2 oill %r2, 2
j .L106 j .L106
/* Callback from C to OCaml */ /* Callback from C to OCaml */
.globl caml_callback_exn .globl caml_callback_asm
.type caml_callback_exn, @function .type caml_callback_asm, @function
caml_callback_exn: caml_callback_asm:
/* Initial shuffling of arguments */ /* Initial shuffling of arguments */
lgr %r0, %r2 /* Closure */ /* (r2 = Caml_state, r3 = closure, 0(r4) = arg1) */
lgr %r2, %r3 /* Argument */ lgr %r1, %r2 /* r1 = Caml_state */
lgr %r3, %r0 lg %r2, 0(%r4) /* r2 = Argument */
lg %r0, 0(%r3) /* Code pointer */ /* r3 = Closure */
lg %r0, 0(%r3) /* r0 = Code pointer */
j .L102 j .L102
.globl caml_callback2_exn .globl caml_callback2_asm
.type caml_callback2_exn, @function .type caml_callback2_asm, @function
caml_callback2_exn: caml_callback2_asm:
lgr %r0, %r2 /* Closure */ /* Initial shuffling of arguments */
lgr %r2, %r3 /* First argument */ /* (r2 = Caml_state, r3 = closure, 0(r4) = arg1, 8(r4) = arg2) */
lgr %r3, %r4 /* Second argument */ lgr %r1, %r2 /* r1 = Caml_state */
lgr %r4, %r0 lgr %r0, %r3
Addrglobal(%r0, caml_apply2) lg %r2, 0(%r4) /* r2 = First argument */
lg %r3, 8(%r4) /* r3 = Second argument */
lgr %r4, %r0 /* r4 = Closure */
Addrglobal(%r0, caml_apply2) /* r0 = Code pointer */
j .L102 j .L102
.globl caml_callback3_exn .globl caml_callback3_asm
.type caml_callback3_exn, @function .type caml_callback3_asm, @function
caml_callback3_exn: caml_callback3_asm:
lgr %r0, %r2 /* Closure */ /* Initial shuffling of arguments */
lgr %r2, %r3 /* First argument */ /* (r2 = Caml_state, r3 = closure, 0(r4) = arg1, 8(r4) = arg2,
lgr %r3, %r4 /* Second argument */ 16(r4) = arg3) */
lgr %r4, %r5 /* Third argument */ lgr %r1, %r2 /* r1 = Caml_state */
lgr %r5, %r0 lgr %r5, %r3 /* r5 = Closure */
Addrglobal(%r0, caml_apply3) lg %r2, 0(%r4) /* r2 = First argument */
lg %r3, 8(%r4) /* r3 = Second argument */
lg %r4, 16(%r4) /* r4 = Third argument */
Addrglobal(%r0, caml_apply3) /* r0 = Code pointer */
j .L102 j .L102
.globl caml_ml_array_bound_error .globl caml_ml_array_bound_error
@ -333,7 +333,7 @@ caml_callback3_exn:
caml_ml_array_bound_error: caml_ml_array_bound_error:
/* Save return address before decrementing SP, otherwise /* Save return address before decrementing SP, otherwise
the frame descriptor for the call site is not correct */ the frame descriptor for the call site is not correct */
Storeglobal(%r15, caml_bottom_of_stack) stg %r15, Caml_state(bottom_of_stack)
lay %r15, -160(%r15) /* Reserve stack space for C call */ lay %r15, -160(%r15) /* Reserve stack space for C call */
Addrglobal(%r7, caml_array_bound_error) Addrglobal(%r7, caml_array_bound_error)
j .L101 j .L101
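Both this file and the PowerPC one earlier rename caml_callback{,2,3}_exn to caml_callback{,2,3}_asm and change the convention: the domain state pointer arrives first, the closure second, and the arguments come as a block rather than individually. Below is a hedged sketch of how the C side of the runtime can drive this entry point; the extern declaration is inferred from the register comments above, not copied from a header, and the wrapper is purely illustrative.

```c
#include <caml/mlvalues.h>

/* Assumed declarations; the real ones live in the runtime's domain headers. */
typedef struct caml_domain_state caml_domain_state;
extern caml_domain_state *Caml_state;

/* Signature inferred from the register comments above:
   arg 1 = domain state, arg 2 = closure, arg 3 = pointer to the arguments. */
extern value caml_callback_asm(caml_domain_state *, value closure, value *args);

/* Illustrative wrapper for a one-argument callback. */
value example_callback_exn(value closure, value arg)
{
  value args[1] = { arg };
  return caml_callback_asm(Caml_state, closure, args);
}
```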

@ -109,10 +109,10 @@ void caml_set_something_to_do(void)
caml_something_to_do = 1; caml_something_to_do = 1;
#ifdef NATIVE_CODE #ifdef NATIVE_CODE
/* When this function is called without [caml_c_call] (e.g., in /* When this function is called without [caml_c_call] (e.g., in
[caml_modify]), this is only moderately effective on ports that [caml_modify]), this is only moderately effective on ports that cache
cache [caml_young_limit] in a register, so it may take a while [Caml_state->young_limit] in a register, so it may take a while before the
before the register is reloaded from [caml_young_limit]. */ register is reloaded from [Caml_state->young_limit]. */
caml_young_limit = caml_young_alloc_end; Caml_state->young_limit = Caml_state->young_alloc_end;
#endif #endif
} }
@ -265,29 +265,27 @@ void caml_execute_signal(int signal_number, int in_signal_handler)
void caml_update_young_limit (void) void caml_update_young_limit (void)
{ {
/* The minor heap grows downwards. The first trigger is the largest one. */ /* The minor heap grows downwards. The first trigger is the largest one. */
caml_young_limit = caml_memprof_young_trigger < caml_young_trigger ? Caml_state->young_limit =
caml_young_trigger : caml_memprof_young_trigger; caml_memprof_young_trigger < Caml_state->young_trigger ?
Caml_state->young_trigger : caml_memprof_young_trigger;
#ifdef NATIVE_CODE #ifdef NATIVE_CODE
if(caml_something_to_do) if(caml_something_to_do)
caml_young_limit = caml_young_alloc_end; Caml_state->young_limit = Caml_state->young_alloc_end;
#endif #endif
} }
/* Arrange for a garbage collection to be performed as soon as possible */ /* Arrange for a garbage collection to be performed as soon as possible */
int volatile caml_requested_major_slice = 0;
int volatile caml_requested_minor_gc = 0;
void caml_request_major_slice (void) void caml_request_major_slice (void)
{ {
caml_requested_major_slice = 1; Caml_state->requested_major_slice = 1;
caml_set_something_to_do(); caml_set_something_to_do();
} }
void caml_request_minor_gc (void) void caml_request_minor_gc (void)
{ {
caml_requested_minor_gc = 1; Caml_state->requested_minor_gc = 1;
caml_set_something_to_do(); caml_set_something_to_do();
} }
@ -299,7 +297,7 @@ CAMLexport value caml_check_urgent_gc (value extra_root)
#ifdef NATIVE_CODE #ifdef NATIVE_CODE
caml_update_young_limit(); caml_update_young_limit();
#endif #endif
if (caml_requested_major_slice || caml_requested_minor_gc){ if (Caml_state->requested_major_slice || Caml_state->requested_minor_gc){
CAML_INSTR_INT ("force_minor/check_urgent_gc@", 1); CAML_INSTR_INT ("force_minor/check_urgent_gc@", 1);
caml_gc_dispatch(); caml_gc_dispatch();
} }
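For readers unfamiliar with the trigger being manipulated here: on native-code ports the inlined allocation sequence compares the allocation pointer against young_limit, so raising Caml_state->young_limit to young_alloc_end (the top of the downward-growing minor heap) makes the very next allocation take the slow path into the runtime. A simplified model of that fast path follows; it is not the runtime's actual code.

```c
#include <caml/mlvalues.h>

/* Simplified model of the allocation fast path emitted by the native-code
   compiler.  Raising *young_limit to the top of the minor heap, as
   caml_set_something_to_do does above, makes the comparison fail on the
   very next allocation, so the runtime regains control promptly. */
value *alloc_fast_path_model(value **young_ptr, value *young_limit,
                             mlsize_t wosize, void (*call_gc)(void))
{
  *young_ptr -= Whsize_wosize(wosize);   /* minor heap grows downwards */
  if (*young_ptr < young_limit)
    call_gc();                           /* slow path: GC / pending actions */
  return *young_ptr;
}
```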

@ -86,13 +86,13 @@ void caml_garbage_collection(void)
be correctly implemented. be correctly implemented.
*/ */
caml_memprof_renew_minor_sample(); caml_memprof_renew_minor_sample();
if (caml_requested_major_slice || caml_requested_minor_gc || if (Caml_state->requested_major_slice || Caml_state->requested_minor_gc ||
caml_young_ptr - caml_young_trigger < Max_young_whsize){ Caml_state->young_ptr - Caml_state->young_trigger < Max_young_whsize){
caml_gc_dispatch (); caml_gc_dispatch ();
} }
#ifdef WITH_SPACETIME #ifdef WITH_SPACETIME
if (caml_young_ptr == caml_young_alloc_end) { if (Caml_state->young_ptr == Caml_state->young_alloc_end) {
caml_spacetime_automatic_snapshot(); caml_spacetime_automatic_snapshot();
} }
#endif #endif
@ -114,12 +114,12 @@ DECLARE_SIGNAL_HANDLER(handle_signal)
caml_enter_blocking_section_hook(); caml_enter_blocking_section_hook();
} else { } else {
caml_record_signal(sig); caml_record_signal(sig);
/* Some ports cache [caml_young_limit] in a register. /* Some ports cache [Caml_state->young_limit] in a register.
Use the signal context to modify that register too, but only if Use the signal context to modify that register too, but only if
we are inside OCaml code (not inside C code). */ we are inside OCaml code (not inside C code). */
#if defined(CONTEXT_PC) && defined(CONTEXT_YOUNG_LIMIT) #if defined(CONTEXT_PC) && defined(CONTEXT_YOUNG_LIMIT)
if (Is_in_code_area(CONTEXT_PC)) if (Is_in_code_area(CONTEXT_PC))
CONTEXT_YOUNG_LIMIT = (context_reg) caml_young_limit; CONTEXT_YOUNG_LIMIT = (context_reg) Caml_state->young_limit;
#endif #endif
} }
errno = saved_errno; errno = saved_errno;
@ -182,10 +182,10 @@ DECLARE_SIGNAL_HANDLER(trap_handler)
caml_sigmask_hook(SIG_UNBLOCK, &mask, NULL); caml_sigmask_hook(SIG_UNBLOCK, &mask, NULL);
} }
#endif #endif
caml_exception_pointer = (char *) CONTEXT_EXCEPTION_POINTER; Caml_state->exception_pointer = (char *) CONTEXT_EXCEPTION_POINTER;
caml_young_ptr = (value *) CONTEXT_YOUNG_PTR; Caml_state->young_ptr = (value *) CONTEXT_YOUNG_PTR;
caml_bottom_of_stack = (char *) CONTEXT_SP; Caml_state->bottom_of_stack = (char *) CONTEXT_SP;
caml_last_return_address = (uintnat) CONTEXT_PC; Caml_state->last_return_address = (uintnat) CONTEXT_PC;
caml_array_bound_error(); caml_array_bound_error();
} }
#endif #endif
@ -207,7 +207,7 @@ static char sig_alt_stack[SIGSTKSZ];
#endif #endif
#ifdef RETURN_AFTER_STACK_OVERFLOW #ifdef RETURN_AFTER_STACK_OVERFLOW
extern void caml_stack_overflow(void); extern void caml_stack_overflow(caml_domain_state*);
#endif #endif
DECLARE_SIGNAL_HANDLER(segv_handler) DECLARE_SIGNAL_HANDLER(segv_handler)
@ -234,6 +234,7 @@ DECLARE_SIGNAL_HANDLER(segv_handler)
handler, we jump to the asm function [caml_stack_overflow] handler, we jump to the asm function [caml_stack_overflow]
(from $ARCH.S). */ (from $ARCH.S). */
#ifdef CONTEXT_PC #ifdef CONTEXT_PC
CONTEXT_C_ARG_1 = (context_reg) Caml_state;
CONTEXT_PC = (context_reg) &caml_stack_overflow; CONTEXT_PC = (context_reg) &caml_stack_overflow;
#else #else
#error "CONTEXT_PC must be defined if RETURN_AFTER_STACK_OVERFLOW is" #error "CONTEXT_PC must be defined if RETURN_AFTER_STACK_OVERFLOW is"
@ -241,8 +242,8 @@ DECLARE_SIGNAL_HANDLER(segv_handler)
#else #else
/* Raise a Stack_overflow exception straight from this signal handler */ /* Raise a Stack_overflow exception straight from this signal handler */
#if defined(CONTEXT_YOUNG_PTR) && defined(CONTEXT_EXCEPTION_POINTER) #if defined(CONTEXT_YOUNG_PTR) && defined(CONTEXT_EXCEPTION_POINTER)
caml_exception_pointer = (char *) CONTEXT_EXCEPTION_POINTER; Caml_state->exception_pointer = (char *) CONTEXT_EXCEPTION_POINTER;
caml_young_ptr = (value *) CONTEXT_YOUNG_PTR; Caml_state->young_ptr = (value *) CONTEXT_YOUNG_PTR;
#endif #endif
caml_raise_stack_overflow(); caml_raise_stack_overflow();
#endif #endif

@ -27,8 +27,8 @@
sigact.sa_flags = SA_SIGINFO sigact.sa_flags = SA_SIGINFO
typedef greg_t context_reg; typedef greg_t context_reg;
#define CONTEXT_C_ARG_1 (context->uc_mcontext.gregs[REG_RDI])
#define CONTEXT_PC (context->uc_mcontext.gregs[REG_RIP]) #define CONTEXT_PC (context->uc_mcontext.gregs[REG_RIP])
#define CONTEXT_EXCEPTION_POINTER (context->uc_mcontext.gregs[REG_R14])
#define CONTEXT_YOUNG_PTR (context->uc_mcontext.gregs[REG_R15]) #define CONTEXT_YOUNG_PTR (context->uc_mcontext.gregs[REG_R15])
#define CONTEXT_FAULTING_ADDRESS ((char *)context->uc_mcontext.gregs[REG_CR2]) #define CONTEXT_FAULTING_ADDRESS ((char *)context->uc_mcontext.gregs[REG_CR2])
@ -55,8 +55,8 @@
typedef unsigned long long context_reg; typedef unsigned long long context_reg;
#define CONTEXT_STATE (((ucontext_t *)context)->uc_mcontext->CONTEXT_REG(ss)) #define CONTEXT_STATE (((ucontext_t *)context)->uc_mcontext->CONTEXT_REG(ss))
#define CONTEXT_C_ARG_1 (CONTEXT_STATE.CONTEXT_REG(rdi))
#define CONTEXT_PC (CONTEXT_STATE.CONTEXT_REG(rip)) #define CONTEXT_PC (CONTEXT_STATE.CONTEXT_REG(rip))
#define CONTEXT_EXCEPTION_POINTER (CONTEXT_STATE.CONTEXT_REG(r14))
#define CONTEXT_YOUNG_PTR (CONTEXT_STATE.CONTEXT_REG(r15)) #define CONTEXT_YOUNG_PTR (CONTEXT_STATE.CONTEXT_REG(r15))
#define CONTEXT_SP (CONTEXT_STATE.CONTEXT_REG(rsp)) #define CONTEXT_SP (CONTEXT_STATE.CONTEXT_REG(rsp))
#define CONTEXT_FAULTING_ADDRESS ((char *) info->si_addr) #define CONTEXT_FAULTING_ADDRESS ((char *) info->si_addr)
@ -137,7 +137,7 @@
typedef greg_t context_reg; typedef greg_t context_reg;
#define CONTEXT_PC (context->uc_mcontext.gregs[REG_RIP]) #define CONTEXT_PC (context->uc_mcontext.gregs[REG_RIP])
#define CONTEXT_EXCEPTION_POINTER (context->uc_mcontext.gregs[REG_R14]) #define CONTEXT_C_ARG_1 (context->uc_mcontext.gregs[REG_RDI])
#define CONTEXT_YOUNG_PTR (context->uc_mcontext.gregs[REG_R15]) #define CONTEXT_YOUNG_PTR (context->uc_mcontext.gregs[REG_R15])
#define CONTEXT_FAULTING_ADDRESS ((char *) info->si_addr) #define CONTEXT_FAULTING_ADDRESS ((char *) info->si_addr)
@ -153,7 +153,7 @@
sigact.sa_flags = SA_SIGINFO sigact.sa_flags = SA_SIGINFO
#define CONTEXT_PC (context->sc_rip) #define CONTEXT_PC (context->sc_rip)
#define CONTEXT_EXCEPTION_POINTER (context->sc_r14) #define CONTEXT_C_ARG_1 (context->sc_rdi)
#define CONTEXT_YOUNG_PTR (context->sc_r15) #define CONTEXT_YOUNG_PTR (context->sc_r15)
#define CONTEXT_FAULTING_ADDRESS ((char *) info->si_addr) #define CONTEXT_FAULTING_ADDRESS ((char *) info->si_addr)
@ -170,7 +170,7 @@
sigact.sa_flags = SA_SIGINFO sigact.sa_flags = SA_SIGINFO
#define CONTEXT_PC (_UC_MACHINE_PC(context)) #define CONTEXT_PC (_UC_MACHINE_PC(context))
#define CONTEXT_EXCEPTION_POINTER (context->uc_mcontext.gregs[REG_R14]) #define CONTEXT_C_ARG_1 (context->uc_mcontext.gregs[REG_RDI])
#define CONTEXT_YOUNG_PTR (context->uc_mcontext.gregs[REG_R15]) #define CONTEXT_YOUNG_PTR (context->uc_mcontext.gregs[REG_R15])
#define CONTEXT_FAULTING_ADDRESS ((char *) info->si_addr) #define CONTEXT_FAULTING_ADDRESS ((char *) info->si_addr)
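The CONTEXT_EXCEPTION_POINTER definitions give way to CONTEXT_C_ARG_1 because the stack-overflow path now has to hand Caml_state to caml_stack_overflow: the segv handler earlier in this diff writes Caml_state into the saved first-argument register and points the saved PC at caml_stack_overflow, so returning from the signal handler acts like the call caml_stack_overflow(Caml_state). A hedged sketch of that redirection for the x86-64 System V case shown above, where REG_RDI carries the first integer argument; the helper is illustrative, not the runtime's code.

```c
#define _GNU_SOURCE          /* needed for REG_RDI / REG_RIP on glibc */
#include <ucontext.h>

/* Rewrite the saved register file so that returning from the signal
   handler behaves like the call handler(state). */
void redirect_to(ucontext_t *uc, void *state, void (*handler)(void *))
{
  uc->uc_mcontext.gregs[REG_RDI] = (greg_t) (unsigned long) state;
  uc->uc_mcontext.gregs[REG_RIP] = (greg_t) (unsigned long) handler;
}
```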

@ -24,24 +24,19 @@
#include "caml/mlvalues.h" #include "caml/mlvalues.h"
#include "caml/stacks.h" #include "caml/stacks.h"
CAMLexport value * caml_stack_low;
CAMLexport value * caml_stack_high;
CAMLexport value * caml_stack_threshold;
CAMLexport value * caml_extern_sp;
CAMLexport value * caml_trapsp;
CAMLexport value * caml_trap_barrier;
value caml_global_data = 0; value caml_global_data = 0;
uintnat caml_max_stack_size; /* also used in gc_ctrl.c */ uintnat caml_max_stack_size; /* also used in gc_ctrl.c */
void caml_init_stack (uintnat initial_max_size) void caml_init_stack (uintnat initial_max_size)
{ {
caml_stack_low = (value *) caml_stat_alloc(Stack_size); Caml_state->stack_low = (value *) caml_stat_alloc(Stack_size);
caml_stack_high = caml_stack_low + Stack_size / sizeof (value); Caml_state->stack_high = Caml_state->stack_low + Stack_size / sizeof (value);
caml_stack_threshold = caml_stack_low + Stack_threshold / sizeof (value); Caml_state->stack_threshold =
caml_extern_sp = caml_stack_high; Caml_state->stack_low + Stack_threshold / sizeof (value);
caml_trapsp = caml_stack_high; Caml_state->extern_sp = Caml_state->stack_high;
caml_trap_barrier = caml_stack_high + 1; Caml_state->trapsp = Caml_state->stack_high;
Caml_state->trap_barrier = Caml_state->stack_high + 1;
caml_max_stack_size = initial_max_size; caml_max_stack_size = initial_max_size;
caml_gc_message (0x08, "Initial stack limit: %" caml_gc_message (0x08, "Initial stack limit: %"
ARCH_INTNAT_PRINTF_FORMAT "uk bytes\n", ARCH_INTNAT_PRINTF_FORMAT "uk bytes\n",
@ -54,12 +49,13 @@ void caml_realloc_stack(asize_t required_space)
value * new_low, * new_high, * new_sp; value * new_low, * new_high, * new_sp;
value * p; value * p;
CAMLassert(caml_extern_sp >= caml_stack_low); CAMLassert(Caml_state->extern_sp >= Caml_state->stack_low);
size = caml_stack_high - caml_stack_low; size = Caml_state->stack_high - Caml_state->stack_low;
do { do {
if (size >= caml_max_stack_size) caml_raise_stack_overflow(); if (size >= caml_max_stack_size) caml_raise_stack_overflow();
size *= 2; size *= 2;
} while (size < caml_stack_high - caml_extern_sp + required_space); } while (size < Caml_state->stack_high - Caml_state->extern_sp
+ required_space);
caml_gc_message (0x08, "Growing stack to %" caml_gc_message (0x08, "Growing stack to %"
ARCH_INTNAT_PRINTF_FORMAT "uk bytes\n", ARCH_INTNAT_PRINTF_FORMAT "uk bytes\n",
(uintnat) size * sizeof(value) / 1024); (uintnat) size * sizeof(value) / 1024);
@ -67,21 +63,22 @@ void caml_realloc_stack(asize_t required_space)
new_high = new_low + size; new_high = new_low + size;
#define shift(ptr) \ #define shift(ptr) \
((char *) new_high - ((char *) caml_stack_high - (char *) (ptr))) ((char *) new_high - ((char *) Caml_state->stack_high - (char *) (ptr)))
new_sp = (value *) shift(caml_extern_sp); new_sp = (value *) shift(Caml_state->extern_sp);
memmove((char *) new_sp, memmove((char *) new_sp,
(char *) caml_extern_sp, (char *) Caml_state->extern_sp,
(caml_stack_high - caml_extern_sp) * sizeof(value)); (Caml_state->stack_high - Caml_state->extern_sp) * sizeof(value));
caml_stat_free(caml_stack_low); caml_stat_free(Caml_state->stack_low);
caml_trapsp = (value *) shift(caml_trapsp); Caml_state->trapsp = (value *) shift(Caml_state->trapsp);
caml_trap_barrier = (value *) shift(caml_trap_barrier); Caml_state->trap_barrier = (value *) shift(Caml_state->trap_barrier);
for (p = caml_trapsp; p < new_high; p = Trap_link(p)) for (p = Caml_state->trapsp; p < new_high; p = Trap_link(p))
Trap_link(p) = (value *) shift(Trap_link(p)); Trap_link(p) = (value *) shift(Trap_link(p));
caml_stack_low = new_low; Caml_state->stack_low = new_low;
caml_stack_high = new_high; Caml_state->stack_high = new_high;
caml_stack_threshold = caml_stack_low + Stack_threshold / sizeof (value); Caml_state->stack_threshold =
caml_extern_sp = new_sp; Caml_state->stack_low + Stack_threshold / sizeof (value);
Caml_state->extern_sp = new_sp;
#undef shift #undef shift
} }
@ -89,13 +86,14 @@ void caml_realloc_stack(asize_t required_space)
CAMLprim value caml_ensure_stack_capacity(value required_space) CAMLprim value caml_ensure_stack_capacity(value required_space)
{ {
asize_t req = Long_val(required_space); asize_t req = Long_val(required_space);
if (caml_extern_sp - req < caml_stack_low) caml_realloc_stack(req); if (Caml_state->extern_sp - req < Caml_state->stack_low)
caml_realloc_stack(req);
return Val_unit; return Val_unit;
} }
void caml_change_max_stack_size (uintnat new_max_size) void caml_change_max_stack_size (uintnat new_max_size)
{ {
asize_t size = caml_stack_high - caml_extern_sp asize_t size = Caml_state->stack_high - Caml_state->extern_sp
+ Stack_threshold / sizeof (value); + Stack_threshold / sizeof (value);
if (new_max_size < size) new_max_size = size; if (new_max_size < size) new_max_size = size;
@ -112,7 +110,7 @@ CAMLexport uintnat (*caml_stack_usage_hook)(void) = NULL;
uintnat caml_stack_usage(void) uintnat caml_stack_usage(void)
{ {
uintnat sz; uintnat sz;
sz = caml_stack_high - caml_extern_sp; sz = Caml_state->stack_high - Caml_state->extern_sp;
if (caml_stack_usage_hook != NULL) if (caml_stack_usage_hook != NULL)
sz += (*caml_stack_usage_hook)(); sz += (*caml_stack_usage_hook)();
return sz; return sz;
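caml_realloc_stack keeps every relocated pointer at the same distance from the top of the downward-growing stack; that is all the shift() macro above computes. A tiny self-contained illustration of the arithmetic, using ordinary buffers in place of the real stack blocks:

```c
#include <assert.h>
#include <string.h>

int main(void)
{
  char old_stack[64], new_stack[128];
  char *old_high = old_stack + sizeof old_stack;
  char *new_high = new_stack + sizeof new_stack;
  char *old_sp = old_high - 16;                   /* 16 bytes in use */
  /* shift(): same distance from the top of the stack */
  char *new_sp = new_high - (old_high - old_sp);
  memmove(new_sp, old_sp, (size_t) (old_high - old_sp));
  assert(new_high - new_sp == old_high - old_sp);
  return 0;
}
```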

@ -33,6 +33,7 @@
#include "caml/callback.h" #include "caml/callback.h"
#include "caml/custom.h" #include "caml/custom.h"
#include "caml/debugger.h" #include "caml/debugger.h"
#include "caml/domain.h"
#include "caml/dynlink.h" #include "caml/dynlink.h"
#include "caml/exec.h" #include "caml/exec.h"
#include "caml/fail.h" #include "caml/fail.h"
@ -334,6 +335,9 @@ CAMLexport void caml_main(char_os **argv)
caml_ensure_spacetime_dot_o_is_included++; caml_ensure_spacetime_dot_o_is_included++;
/* Initialize the domain */
caml_init_domain();
/* Determine options */ /* Determine options */
#ifdef DEBUG #ifdef DEBUG
caml_verb_gc = 0x3F; caml_verb_gc = 0x3F;
@ -354,7 +358,6 @@ CAMLexport void caml_main(char_os **argv)
#endif #endif
caml_init_custom_operations(); caml_init_custom_operations();
caml_ext_table_init(&caml_shared_libs_path, 8); caml_ext_table_init(&caml_shared_libs_path, 8);
caml_external_raise = NULL;
/* Determine position of bytecode file */ /* Determine position of bytecode file */
pos = 0; pos = 0;
@ -453,13 +456,13 @@ CAMLexport void caml_main(char_os **argv)
caml_debugger(PROGRAM_START, Val_unit); caml_debugger(PROGRAM_START, Val_unit);
res = caml_interprete(caml_start_code, caml_code_size); res = caml_interprete(caml_start_code, caml_code_size);
if (Is_exception_result(res)) { if (Is_exception_result(res)) {
caml_exn_bucket = Extract_exception(res); Caml_state->exn_bucket = Extract_exception(res);
if (caml_debugger_in_use) { if (caml_debugger_in_use) {
caml_extern_sp = &caml_exn_bucket; /* The debugger needs the Caml_state->extern_sp = &Caml_state->exn_bucket; /* The debugger needs the
exception value.*/ exception value.*/
caml_debugger(UNCAUGHT_EXC, Val_unit); caml_debugger(UNCAUGHT_EXC, Val_unit);
} }
caml_fatal_uncaught_exception(caml_exn_bucket); caml_fatal_uncaught_exception(Caml_state->exn_bucket);
} }
} }
@ -475,6 +478,8 @@ CAMLexport value caml_startup_code_exn(
char_os * cds_file; char_os * cds_file;
char_os * exe_name; char_os * exe_name;
/* Initialize the domain */
caml_init_domain();
/* Determine options */ /* Determine options */
#ifdef DEBUG #ifdef DEBUG
caml_verb_gc = 0x3F; caml_verb_gc = 0x3F;
@ -500,7 +505,6 @@ CAMLexport value caml_startup_code_exn(
} }
exe_name = caml_executable_name(); exe_name = caml_executable_name();
if (exe_name == NULL) exe_name = caml_search_exe_in_path(argv[0]); if (exe_name == NULL) exe_name = caml_search_exe_in_path(argv[0]);
caml_external_raise = NULL;
/* Initialize the abstract machine */ /* Initialize the abstract machine */
caml_init_gc (caml_init_minor_heap_wsz, caml_init_heap_wsz, caml_init_gc (caml_init_minor_heap_wsz, caml_init_heap_wsz,
caml_init_heap_chunk_sz, caml_init_percent_free, caml_init_heap_chunk_sz, caml_init_percent_free,
@ -552,12 +556,12 @@ CAMLexport void caml_startup_code(
section_table, section_table_size, section_table, section_table_size,
pooling, argv); pooling, argv);
if (Is_exception_result(res)) { if (Is_exception_result(res)) {
caml_exn_bucket = Extract_exception(res); Caml_state->exn_bucket = Extract_exception(res);
if (caml_debugger_in_use) { if (caml_debugger_in_use) {
caml_extern_sp = &caml_exn_bucket; /* The debugger needs the Caml_state->extern_sp = &Caml_state->exn_bucket; /* The debugger needs the
exception value.*/ exception value.*/
caml_debugger(UNCAUGHT_EXC, Val_unit); caml_debugger(UNCAUGHT_EXC, Val_unit);
} }
caml_fatal_uncaught_exception(caml_exn_bucket); caml_fatal_uncaught_exception(Caml_state->exn_bucket);
} }
} }

@ -23,6 +23,7 @@
#include "caml/backtrace.h" #include "caml/backtrace.h"
#include "caml/custom.h" #include "caml/custom.h"
#include "caml/debugger.h" #include "caml/debugger.h"
#include "caml/domain.h"
#include "caml/fail.h" #include "caml/fail.h"
#include "caml/freelist.h" #include "caml/freelist.h"
#include "caml/gc.h" #include "caml/gc.h"
@ -90,7 +91,7 @@ static void init_static(void)
struct longjmp_buffer caml_termination_jmpbuf; struct longjmp_buffer caml_termination_jmpbuf;
void (*caml_termination_hook)(void *) = NULL; void (*caml_termination_hook)(void *) = NULL;
extern value caml_start_program (void); extern value caml_start_program (caml_domain_state*);
extern void caml_init_ieee_floats (void); extern void caml_init_ieee_floats (void);
extern void caml_init_signals (void); extern void caml_init_signals (void);
#ifdef _WIN32 #ifdef _WIN32
@ -109,6 +110,8 @@ value caml_startup_common(char_os **argv, int pooling)
char_os * exe_name, * proc_self_exe; char_os * exe_name, * proc_self_exe;
char tos; char tos;
/* Initialize the domain */
caml_init_domain();
/* Determine options */ /* Determine options */
#ifdef DEBUG #ifdef DEBUG
caml_verb_gc = 0x3F; caml_verb_gc = 0x3F;
@ -132,7 +135,7 @@ value caml_startup_common(char_os **argv, int pooling)
caml_install_invalid_parameter_handler(); caml_install_invalid_parameter_handler();
#endif #endif
caml_init_custom_operations(); caml_init_custom_operations();
caml_top_of_stack = &tos; Caml_state->top_of_stack = &tos;
caml_init_gc (caml_init_minor_heap_wsz, caml_init_heap_wsz, caml_init_gc (caml_init_minor_heap_wsz, caml_init_heap_wsz,
caml_init_heap_chunk_sz, caml_init_percent_free, caml_init_heap_chunk_sz, caml_init_percent_free,
caml_init_max_percent_free, caml_init_major_window, caml_init_max_percent_free, caml_init_major_window,
@ -157,7 +160,7 @@ value caml_startup_common(char_os **argv, int pooling)
if (caml_termination_hook != NULL) caml_termination_hook(NULL); if (caml_termination_hook != NULL) caml_termination_hook(NULL);
return Val_unit; return Val_unit;
} }
return caml_start_program(); return caml_start_program(Caml_state);
} }
value caml_startup_exn(char_os **argv) value caml_startup_exn(char_os **argv)

@ -118,17 +118,18 @@ CAMLprim value caml_sys_exit(value retcode_v)
if ((caml_verb_gc & 0x400) != 0) { if ((caml_verb_gc & 0x400) != 0) {
/* cf caml_gc_counters */ /* cf caml_gc_counters */
double minwords = caml_stat_minor_words double minwords = Caml_state->stat_minor_words
+ (double) (caml_young_end - caml_young_ptr); + (double) (Caml_state->young_end - Caml_state->young_ptr);
double prowords = caml_stat_promoted_words; double prowords = Caml_state->stat_promoted_words;
double majwords = caml_stat_major_words + (double) caml_allocated_words; double majwords =
Caml_state->stat_major_words + (double) caml_allocated_words;
double allocated_words = minwords + majwords - prowords; double allocated_words = minwords + majwords - prowords;
intnat mincoll = caml_stat_minor_collections; intnat mincoll = Caml_state->stat_minor_collections;
intnat majcoll = caml_stat_major_collections; intnat majcoll = Caml_state->stat_major_collections;
intnat heap_words = caml_stat_heap_wsz; intnat heap_words = Caml_state->stat_heap_wsz;
intnat heap_chunks = caml_stat_heap_chunks; intnat heap_chunks = Caml_state->stat_heap_chunks;
intnat top_heap_words = caml_stat_top_heap_wsz; intnat top_heap_words = Caml_state->stat_top_heap_wsz;
intnat cpct = caml_stat_compactions; intnat cpct = Caml_state->stat_compactions;
caml_gc_message(0x400, "allocated_words: %.0f\n", allocated_words); caml_gc_message(0x400, "allocated_words: %.0f\n", allocated_words);
caml_gc_message(0x400, "minor_words: %.0f\n", minwords); caml_gc_message(0x400, "minor_words: %.0f\n", minwords);
caml_gc_message(0x400, "promoted_words: %.0f\n", prowords); caml_gc_message(0x400, "promoted_words: %.0f\n", prowords);

@ -189,7 +189,7 @@ static void do_set (value ar, mlsize_t offset, value v)
value old = Field (ar, offset); value old = Field (ar, offset);
Field (ar, offset) = v; Field (ar, offset) = v;
if (!(Is_block (old) && Is_young (old))){ if (!(Is_block (old) && Is_young (old))){
add_to_ephe_ref_table (&caml_ephe_ref_table, ar, offset); add_to_ephe_ref_table (Caml_state->ephe_ref_table, ar, offset);
} }
}else{ }else{
Field (ar, offset) = v; Field (ar, offset) = v;

@ -560,8 +560,6 @@ static LONG CALLBACK
} }
#else #else
extern char *caml_exception_pointer;
extern value *caml_young_ptr;
/* Do not use the macro from address_class.h here. */ /* Do not use the macro from address_class.h here. */
#undef Is_in_code_area #undef Is_in_code_area
@ -589,8 +587,7 @@ static LONG CALLBACK
faulting_address = exn_info->ExceptionRecord->ExceptionInformation[1]; faulting_address = exn_info->ExceptionRecord->ExceptionInformation[1];
/* refresh runtime parameters from registers */ /* refresh runtime parameters from registers */
caml_exception_pointer = (char *) ctx->R14; Caml_state->young_ptr = (value *) ctx->R15;
caml_young_ptr = (value *) ctx->R15;
/* call caml_reset_stack(faulting_address) using the alternate stack */ /* call caml_reset_stack(faulting_address) using the alternate stack */
alt_rsp = win32_alt_stack + sizeof(win32_alt_stack) / sizeof(uintnat); alt_rsp = win32_alt_stack + sizeof(win32_alt_stack) / sizeof(uintnat);

@ -0,0 +1,16 @@
(* TEST
modules = "stub.c"
* pass
** bytecode
** native
* pass
flags = "-ccopt -DCAML_NAME_SPACE"
** bytecode
** native
*)
external retrieve_young_limit : 'a -> nativeint = "retrieve_young_limit"
let bar =
let foo = Bytes.create 4 in
retrieve_young_limit foo

@ -0,0 +1 @@
v is young

@ -0,0 +1 @@
main.ml

@ -0,0 +1,18 @@
#include <stdio.h>
#include <caml/minor_gc.h>
#include <caml/memory.h>
#include <caml/mlvalues.h>
#include <caml/alloc.h>
#include <caml/address_class.h>
/* see PR#8892 */
typedef char * addr;
CAMLprim value retrieve_young_limit(value v)
{
CAMLparam1(v);
printf("v is%s young\n", (Is_young(v) ? "" : " not"));
#ifdef CAML_NAME_SPACE
CAMLreturn(caml_copy_nativeint((intnat)caml_young_limit));
#else
CAMLreturn(copy_nativeint((intnat)young_limit));
#endif
}

@ -49,9 +49,7 @@ G(call_gen_code):
G(caml_c_call): G(caml_c_call):
jmp *%eax jmp *%eax
.comm G(caml_exception_pointer), 4 .comm G(Caml_state), 4
.comm G(young_ptr), 4
.comm G(young_start), 4
/* Some tests are designed to cause registers to spill; on /* Some tests are designed to cause registers to spill; on
* x86 we require the caml_extra_params symbol from the RTS. */ * x86 we require the caml_extra_params symbol from the RTS. */

@ -61,11 +61,7 @@ _caml_raise_exn:
int 3 int 3
.DATA .DATA
PUBLIC _caml_exception_pointer PUBLIC _Caml_state
_caml_exception_pointer dword 0 _Caml_state dword 0
PUBLIC _caml_young_ptr
_caml_young_ptr dword 0
PUBLIC _caml_young_limit
_caml_young_limit dword 0
END END

@ -63,8 +63,9 @@ let keyword_table =
"mulh", MULH; "mulh", MULH;
"or", OR; "or", OR;
"proj", PROJ; "proj", PROJ;
"raise_withtrace", RAISE Cmm.Raise_withtrace; "raise", RAISE Lambda.Raise_regular;
"raise_notrace", RAISE Cmm.Raise_notrace; "reraise", RAISE Lambda.Raise_reraise;
"raise_notrace", RAISE Lambda.Raise_notrace;
"seq", SEQ; "seq", SEQ;
"signed", SIGNED; "signed", SIGNED;
"skip", SKIP; "skip", SKIP;

@ -127,7 +127,7 @@ let access_array base numelt size =
%token OR %token OR
%token <int> POINTER %token <int> POINTER
%token PROJ %token PROJ
%token <Cmm.raise_kind> RAISE %token <Lambda.raise_kind> RAISE
%token RBRACKET %token RBRACKET
%token RPAREN %token RPAREN
%token SEQ %token SEQ

Some files were not shown because too many files have changed in this diff.