diff --git a/cranelift/codegen/src/inst_predicates.rs b/cranelift/codegen/src/inst_predicates.rs index 7b42e1728663..c40a835b048a 100644 --- a/cranelift/codegen/src/inst_predicates.rs +++ b/cranelift/codegen/src/inst_predicates.rs @@ -150,6 +150,12 @@ pub fn has_memory_fence_semantics(op: Opcode) -> bool { | Opcode::Debugtrap | Opcode::SequencePoint => true, Opcode::Call | Opcode::CallIndirect | Opcode::TryCall | Opcode::TryCallIndirect => true, + // N.B.: this is *load-bearing for borrow safety and + // provenance in Wasmtime*. A trapping op can potentially + // cause an implicit hostcall, and that hostcall implicitly + // mutably borrows Wasmtime's Store. So we can't allow alias + // analysis to cross trapping opcodes; they are implicitly + // as-if they called the host. op if op.can_trap() => true, _ => false, } diff --git a/crates/cranelift/src/bounds_checks.rs b/crates/cranelift/src/bounds_checks.rs index c0dc9ad664dd..2806e21c3976 100644 --- a/crates/cranelift/src/bounds_checks.rs +++ b/crates/cranelift/src/bounds_checks.rs @@ -22,7 +22,7 @@ use crate::{ Reachability, func_environ::FuncEnvironment, - translate::{HeapData, TargetEnvironment}, + translate::{FuncTranslationStacks, HeapData, TargetEnvironment}, }; use Reachability::*; use cranelift_codegen::{ @@ -84,12 +84,15 @@ pub fn bounds_check_and_compute_addr( index: ir::Value, bounds_check: BoundsCheck, trap: ir::TrapCode, + stacks: &FuncTranslationStacks, ) -> Reachability { match bounds_check { BoundsCheck::StaticOffset { offset, access_size, - } => bounds_check_field_access(builder, env, heap, index, offset, access_size, trap), + } => { + bounds_check_field_access(builder, env, heap, index, offset, access_size, trap, stacks) + } #[cfg(feature = "gc")] BoundsCheck::StaticObjectField { @@ -113,6 +116,7 @@ pub fn bounds_check_and_compute_addr( 0, object_size, trap, + stacks, ) { Reachable(v) => v, u @ Unreachable => return u, @@ -123,7 +127,7 @@ pub fn bounds_check_and_compute_addr( } // Otherwise, bounds check just this one field's access. - bounds_check_field_access(builder, env, heap, index, offset, access_size, trap) + bounds_check_field_access(builder, env, heap, index, offset, access_size, trap, stacks) } // Compute the index of the end of the object, bounds check that and get @@ -148,6 +152,7 @@ pub fn bounds_check_and_compute_addr( 0, 0, trap, + stacks, ) { Reachable(v) => v, u @ Unreachable => return u, @@ -177,6 +182,7 @@ fn bounds_check_field_access( offset: u32, access_size: u8, trap: ir::TrapCode, + stacks: &FuncTranslationStacks, ) -> Reachability { let pointer_bit_width = u16::try_from(env.pointer_type().bits()).unwrap(); let bound_gv = heap.bound; @@ -298,7 +304,7 @@ fn bounds_check_field_access( // max_memory_size`, since we will end up being out-of-bounds regardless // of the given `index`. env.before_unconditionally_trapping_memory_access(builder); - env.trap(builder, trap); + env.trap(builder, trap, stacks); return Unreachable; } @@ -308,7 +314,7 @@ fn bounds_check_field_access( // native pointer type anyway, so this is an unconditional trap.
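The comment on trapping opcodes above captures the invariant this patch leans on: a trapping instruction may implicitly re-enter the host, and the host mutably borrows Wasmtime's `Store`, so alias analysis must treat such instructions like calls and forget any memory state it was tracking. A rough, hypothetical sketch of that requirement (not Cranelift's actual alias-analysis code; `Inst`, `Addr`, and `forwardable_loads` are invented for illustration):

```rust
use std::collections::HashMap;

/// Stand-in for a symbolic address; purely illustrative.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct Addr(u64);

/// Toy instruction set for the sketch.
enum Inst {
    Store { addr: Addr, val: u64 },
    Load { addr: Addr },
    /// Anything for which `op.can_trap()` would return true.
    Trapping,
}

/// For each instruction, report the stored value a load could be forwarded
/// from, if any. Trapping instructions are treated as-if they called the
/// host, so every remembered store is invalidated at that point.
fn forwardable_loads(insts: &[Inst]) -> Vec<Option<u64>> {
    let mut last_store: HashMap<Addr, u64> = HashMap::new();
    insts
        .iter()
        .map(|inst| match inst {
            Inst::Store { addr, val } => {
                last_store.insert(*addr, *val);
                None
            }
            Inst::Load { addr } => last_store.get(addr).copied(),
            Inst::Trapping => {
                // The hostcall may have written through the Store to any of
                // this memory; forget everything we knew about it.
                last_store.clear();
                None
            }
        })
        .collect()
}
```

With that treatment, a load of linear memory after, say, a trapping division can no longer reuse a value observed before the division.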
if pointer_bit_width < 64 && offset_and_size >= (1 << pointer_bit_width) { env.before_unconditionally_trapping_memory_access(builder); - env.trap(builder, trap); + env.trap(builder, trap, stacks); return Unreachable; } @@ -430,6 +436,7 @@ fn bounds_check_field_access( AddrPcc::static32(heap.pcc_memory_type, memory_reservation), oob, trap, + stacks, )); } @@ -464,6 +471,7 @@ fn bounds_check_field_access( AddrPcc::dynamic(heap.pcc_memory_type, bound_gv), oob, trap, + stacks, )); } @@ -513,6 +521,7 @@ fn bounds_check_field_access( AddrPcc::dynamic(heap.pcc_memory_type, bound_gv), oob, trap, + stacks, )); } @@ -558,6 +567,7 @@ fn bounds_check_field_access( AddrPcc::dynamic(heap.pcc_memory_type, bound_gv), oob, trap, + stacks, )); } @@ -575,7 +585,7 @@ fn bounds_check_field_access( builder.func.dfg.facts[access_size_val] = Some(Fact::constant(pointer_bit_width, offset_and_size)); } - let adjusted_index = env.uadd_overflow_trap(builder, index, access_size_val, trap); + let adjusted_index = env.uadd_overflow_trap(builder, index, access_size_val, trap, stacks); if pcc { builder.func.dfg.facts[adjusted_index] = Some(Fact::value_offset( pointer_bit_width, @@ -603,6 +613,7 @@ fn bounds_check_field_access( AddrPcc::dynamic(heap.pcc_memory_type, bound_gv), oob, trap, + stacks, )) } @@ -756,9 +767,10 @@ fn explicit_check_oob_condition_and_compute_addr( // in bounds (and therefore we can proceed). oob_condition: ir::Value, trap: ir::TrapCode, + stacks: &FuncTranslationStacks, ) -> ir::Value { if let OobBehavior::ExplicitTrap = oob_behavior { - env.trapnz(builder, oob_condition, trap); + env.trapnz(builder, oob_condition, trap, stacks); } let addr_ty = env.pointer_type(); diff --git a/crates/cranelift/src/compiler.rs b/crates/cranelift/src/compiler.rs index 0c001f6b18d4..c2af9cb9a52c 100644 --- a/crates/cranelift/src/compiler.rs +++ b/crates/cranelift/src/compiler.rs @@ -1620,15 +1620,17 @@ fn save_last_wasm_exit_fp_and_pc( ptr: &impl PtrSize, limits: Value, ) { - // Save the trampoline FP to the limits. Exception unwind needs - // this so that it can know the SP (bottom of frame) for the very - // last Wasm frame. + // Save the Wasm frame exit FP to the limits. We have the + // trampoline FP here; load the next FP in the chain. let trampoline_fp = builder.ins().get_frame_pointer(pointer_type); + let wasm_fp = builder + .ins() + .load(pointer_type, MemFlags::trusted(), trampoline_fp, 0); builder.ins().store( MemFlags::trusted(), - trampoline_fp, + wasm_fp, limits, - ptr.vmstore_context_last_wasm_exit_trampoline_fp(), + ptr.vmstore_context_last_wasm_exit_fp(), ); // Finally save the Wasm return address to the limits. diff --git a/crates/cranelift/src/func_environ.rs b/crates/cranelift/src/func_environ.rs index 52208e7d908a..5188e519f176 100644 --- a/crates/cranelift/src/func_environ.rs +++ b/crates/cranelift/src/func_environ.rs @@ -835,6 +835,7 @@ impl<'module_environment> FuncEnvironment<'module_environment> { table_index: TableIndex, index: ir::Value, cold_blocks: bool, + stacks: &FuncTranslationStacks, ) -> ir::Value { let pointer_type = self.pointer_type(); let table_data = self.get_or_create_table(builder.func, table_index); @@ -843,7 +844,7 @@ impl<'module_environment> FuncEnvironment<'module_environment> { // contents, we check for a null entry here, and // if null, we take a slow-path that invokes a // libcall. 
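For context on the `save_last_wasm_exit_fp_and_pc` change above: with frame pointers preserved, each frame saves its caller's FP at offset 0 from its own FP, so the frames form a singly linked list and the trampoline can recover the last Wasm frame's FP with a single load. A hedged, host-side sketch of walking such a chain (assuming the usual saved-FP-at-offset-0 layout; this is not Wasmtime's actual unwinder):

```rust
/// Walk a frame-pointer chain starting at `fp`, visiting each frame pointer
/// until a null FP terminates the chain.
///
/// Safety: `fp` must point at live frames laid out with the caller's FP
/// saved at offset 0, which is the layout the trampoline code relies on.
unsafe fn walk_fp_chain(mut fp: usize, mut visit: impl FnMut(usize)) {
    while fp != 0 {
        visit(fp);
        // The next FP in the chain is the word stored at [fp + 0]. This is
        // the same single load the trampoline now performs so that it records
        // the last *Wasm* frame's FP rather than its own.
        fp = unsafe { *(fp as *const usize) };
    }
}
```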
- let (table_entry_addr, flags) = table_data.prepare_table_addr(self, builder, index); + let (table_entry_addr, flags) = table_data.prepare_table_addr(self, builder, index, stacks); let value = builder.ins().load(pointer_type, flags, table_entry_addr, 0); if !self.tunables.table_lazy_init { @@ -1028,6 +1029,7 @@ impl<'module_environment> FuncEnvironment<'module_environment> { builder: &mut FunctionBuilder, trap_cond: ir::Value, trap: ir::TrapCode, + stacks: &FuncTranslationStacks, ) { assert!(!self.clif_instruction_traps_enabled()); @@ -1043,17 +1045,22 @@ impl<'module_environment> FuncEnvironment<'module_environment> { builder.seal_block(continuation_block); builder.switch_to_block(trap_block); - self.trap(builder, trap); + self.trap(builder, trap, stacks); builder.switch_to_block(continuation_block); } /// Helper used when `!self.clif_instruction_traps_enabled()` is enabled to /// test whether the divisor is zero. - fn guard_zero_divisor(&mut self, builder: &mut FunctionBuilder, rhs: ir::Value) { + fn guard_zero_divisor( + &mut self, + builder: &mut FunctionBuilder, + rhs: ir::Value, + stacks: &FuncTranslationStacks, + ) { if self.clif_instruction_traps_enabled() { return; } - self.trapz(builder, rhs, ir::TrapCode::INTEGER_DIVISION_BY_ZERO); + self.trapz(builder, rhs, ir::TrapCode::INTEGER_DIVISION_BY_ZERO, stacks); } /// Helper used when `!self.clif_instruction_traps_enabled()` is enabled to @@ -1063,11 +1070,12 @@ impl<'module_environment> FuncEnvironment<'module_environment> { builder: &mut FunctionBuilder, lhs: ir::Value, rhs: ir::Value, + stacks: &FuncTranslationStacks, ) { if self.clif_instruction_traps_enabled() { return; } - self.trapz(builder, rhs, ir::TrapCode::INTEGER_DIVISION_BY_ZERO); + self.trapz(builder, rhs, ir::TrapCode::INTEGER_DIVISION_BY_ZERO, stacks); let ty = builder.func.dfg.value_type(rhs); let minus_one = builder.ins().iconst(ty, -1); @@ -1082,7 +1090,12 @@ impl<'module_environment> FuncEnvironment<'module_environment> { ); let lhs_is_int_min = builder.ins().icmp(IntCC::Equal, lhs, int_min); let is_integer_overflow = builder.ins().band(rhs_is_minus_one, lhs_is_int_min); - self.conditionally_trap(builder, is_integer_overflow, ir::TrapCode::INTEGER_OVERFLOW); + self.conditionally_trap( + builder, + is_integer_overflow, + ir::TrapCode::INTEGER_OVERFLOW, + stacks, + ); } /// Helper used when `!self.clif_instruction_traps_enabled()` is enabled to @@ -1093,6 +1106,7 @@ impl<'module_environment> FuncEnvironment<'module_environment> { ty: ir::Type, val: ir::Value, signed: bool, + stacks: &FuncTranslationStacks, ) { assert!(!self.clif_instruction_traps_enabled()); let val_ty = builder.func.dfg.value_type(val); @@ -1102,19 +1116,24 @@ impl<'module_environment> FuncEnvironment<'module_environment> { builder.ins().fpromote(F64, val) }; let isnan = builder.ins().fcmp(FloatCC::NotEqual, val, val); - self.trapnz(builder, isnan, ir::TrapCode::BAD_CONVERSION_TO_INTEGER); + self.trapnz( + builder, + isnan, + ir::TrapCode::BAD_CONVERSION_TO_INTEGER, + stacks, + ); let val = self.trunc_f64(builder, val); let (lower_bound, upper_bound) = f64_cvt_to_int_bounds(signed, ty.bits()); let lower_bound = builder.ins().f64const(lower_bound); let too_small = builder .ins() .fcmp(FloatCC::LessThanOrEqual, val, lower_bound); - self.trapnz(builder, too_small, ir::TrapCode::INTEGER_OVERFLOW); + self.trapnz(builder, too_small, ir::TrapCode::INTEGER_OVERFLOW, stacks); let upper_bound = builder.ins().f64const(upper_bound); let too_large = builder .ins() .fcmp(FloatCC::GreaterThanOrEqual, val, 
upper_bound); - self.trapnz(builder, too_large, ir::TrapCode::INTEGER_OVERFLOW); + self.trapnz(builder, too_large, ir::TrapCode::INTEGER_OVERFLOW, stacks); } /// Get the `ir::Type` for a `VMSharedTypeIndex`. @@ -2052,6 +2071,7 @@ impl<'a, 'func, 'module_env> Call<'a, 'func, 'module_env> { table_index, callee, cold_blocks, + self.stack, ); // If necessary, check the signature. @@ -2151,10 +2171,12 @@ impl<'a, 'func, 'module_env> Call<'a, 'func, 'module_env> { self.builder, funcref_ptr, crate::TRAP_INDIRECT_CALL_TO_NULL, + self.stack, ); } } - self.env.trap(self.builder, crate::TRAP_BAD_SIGNATURE); + self.env + .trap(self.builder, crate::TRAP_BAD_SIGNATURE, self.stack); return CheckIndirectCallTypeSignature::StaticTrap; } } @@ -2164,7 +2186,7 @@ impl<'a, 'func, 'module_env> Call<'a, 'func, 'module_env> { WasmHeapType::NoFunc => { assert!(table.ref_type.nullable); self.env - .trap(self.builder, crate::TRAP_INDIRECT_CALL_TO_NULL); + .trap(self.builder, crate::TRAP_INDIRECT_CALL_TO_NULL, self.stack); return CheckIndirectCallTypeSignature::StaticTrap; } @@ -2207,8 +2229,12 @@ impl<'a, 'func, 'module_env> Call<'a, 'func, 'module_env> { if self.env.clif_memory_traps_enabled() { mem_flags = mem_flags.with_trap_code(Some(crate::TRAP_INDIRECT_CALL_TO_NULL)); } else { - self.env - .trapz(self.builder, funcref_ptr, crate::TRAP_INDIRECT_CALL_TO_NULL); + self.env.trapz( + self.builder, + funcref_ptr, + crate::TRAP_INDIRECT_CALL_TO_NULL, + self.stack, + ); } let callee_sig_id = self.env @@ -2232,7 +2258,7 @@ impl<'a, 'func, 'module_env> Call<'a, 'func, 'module_env> { .icmp(IntCC::Equal, callee_sig_id, caller_sig_id) }; self.env - .trapz(self.builder, matches, crate::TRAP_BAD_SIGNATURE); + .trapz(self.builder, matches, crate::TRAP_BAD_SIGNATURE, self.stack); CheckIndirectCallTypeSignature::Runtime } @@ -2290,7 +2316,7 @@ impl<'a, 'func, 'module_env> Call<'a, 'func, 'module_env> { callee_flags = callee_flags.with_trap_code(callee_load_trap_code); } else { if let Some(trap) = callee_load_trap_code { - self.env.trapz(self.builder, callee, trap); + self.env.trapz(self.builder, callee, trap, self.stack); } } let func_addr = self.builder.ins().load( @@ -2587,6 +2613,7 @@ impl FuncEnvironment<'_> { builder: &mut FunctionBuilder, table_index: TableIndex, index: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult { let table = self.module.tables[table_index]; let table_data = self.get_or_create_table(builder.func, table_index); @@ -2594,24 +2621,30 @@ impl FuncEnvironment<'_> { match heap_ty.top() { // GC-managed types. WasmHeapTopType::Any | WasmHeapTopType::Extern | WasmHeapTopType::Exn => { - let (src, flags) = table_data.prepare_table_addr(self, builder, index); + let (src, flags) = table_data.prepare_table_addr(self, builder, index, stacks); gc::gc_compiler(self)?.translate_read_gc_reference( self, builder, table.ref_type, src, flags, + stacks, ) } // Function types. - WasmHeapTopType::Func => { - Ok(self.get_or_init_func_ref_table_elem(builder, table_index, index, false)) - } + WasmHeapTopType::Func => Ok(self.get_or_init_func_ref_table_elem( + builder, + table_index, + index, + false, + stacks, + )), // Continuation types. 
WasmHeapTopType::Cont => { - let (elem_addr, flags) = table_data.prepare_table_addr(self, builder, index); + let (elem_addr, flags) = + table_data.prepare_table_addr(self, builder, index, stacks); Ok(builder.ins().load( stack_switching::fatpointer::fatpointer_type(self), flags, @@ -2628,6 +2661,7 @@ impl FuncEnvironment<'_> { table_index: TableIndex, value: ir::Value, index: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult<()> { let table = self.module.tables[table_index]; let table_data = self.get_or_create_table(builder.func, table_index); @@ -2635,7 +2669,7 @@ impl FuncEnvironment<'_> { match heap_ty.top() { // GC-managed types. WasmHeapTopType::Any | WasmHeapTopType::Extern | WasmHeapTopType::Exn => { - let (dst, flags) = table_data.prepare_table_addr(self, builder, index); + let (dst, flags) = table_data.prepare_table_addr(self, builder, index, stacks); gc::gc_compiler(self)?.translate_write_gc_reference( self, builder, @@ -2643,12 +2677,14 @@ impl FuncEnvironment<'_> { dst, value, flags, + stacks, ) } // Function types. WasmHeapTopType::Func => { - let (elem_addr, flags) = table_data.prepare_table_addr(self, builder, index); + let (elem_addr, flags) = + table_data.prepare_table_addr(self, builder, index, stacks); // Set the "initialized bit". See doc-comment on // `FUNCREF_INIT_BIT` in // crates/environ/src/ref_bits.rs for details. @@ -2667,7 +2703,8 @@ impl FuncEnvironment<'_> { // Continuation types. WasmHeapTopType::Cont => { - let (elem_addr, flags) = table_data.prepare_table_addr(self, builder, index); + let (elem_addr, flags) = + table_data.prepare_table_addr(self, builder, index, stacks); builder.ins().store(flags, value, elem_addr, 0); Ok(()) } @@ -2732,11 +2769,12 @@ impl FuncEnvironment<'_> { &mut self, builder: &mut FunctionBuilder, i31ref: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult { // TODO: If we knew we have a `(ref i31)` here, instead of maybe a `(ref // null i31)`, we could omit the `trapz`. But plumbing that type info // from `wasmparser` and through to here is a bit funky. - self.trapz(builder, i31ref, crate::TRAP_NULL_REFERENCE); + self.trapz(builder, i31ref, crate::TRAP_NULL_REFERENCE, stacks); Ok(builder.ins().sshr_imm(i31ref, 1)) } @@ -2744,11 +2782,12 @@ impl FuncEnvironment<'_> { &mut self, builder: &mut FunctionBuilder, i31ref: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult { // TODO: If we knew we have a `(ref i31)` here, instead of maybe a `(ref // null i31)`, we could omit the `trapz`. But plumbing that type info // from `wasmparser` and through to here is a bit funky. 
- self.trapz(builder, i31ref, crate::TRAP_NULL_REFERENCE); + self.trapz(builder, i31ref, crate::TRAP_NULL_REFERENCE, stacks); Ok(builder.ins().ushr_imm(i31ref, 1)) } @@ -2765,16 +2804,18 @@ impl FuncEnvironment<'_> { builder: &mut FunctionBuilder, struct_type_index: TypeIndex, fields: StructFieldsVec, + stacks: &FuncTranslationStacks, ) -> WasmResult { - gc::translate_struct_new(self, builder, struct_type_index, &fields) + gc::translate_struct_new(self, builder, struct_type_index, &fields, stacks) } pub fn translate_struct_new_default( &mut self, builder: &mut FunctionBuilder, struct_type_index: TypeIndex, + stacks: &FuncTranslationStacks, ) -> WasmResult { - gc::translate_struct_new_default(self, builder, struct_type_index) + gc::translate_struct_new_default(self, builder, struct_type_index, stacks) } pub fn translate_struct_get( @@ -2784,6 +2825,7 @@ impl FuncEnvironment<'_> { field_index: u32, struct_ref: ir::Value, extension: Option, + stacks: &FuncTranslationStacks, ) -> WasmResult { gc::translate_struct_get( self, @@ -2792,6 +2834,7 @@ impl FuncEnvironment<'_> { field_index, struct_ref, extension, + stacks, ) } @@ -2802,6 +2845,7 @@ impl FuncEnvironment<'_> { field_index: u32, struct_ref: ir::Value, value: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult<()> { gc::translate_struct_set( self, @@ -2810,6 +2854,7 @@ impl FuncEnvironment<'_> { field_index, struct_ref, value, + stacks, ) } @@ -2818,8 +2863,9 @@ impl FuncEnvironment<'_> { builder: &mut FunctionBuilder<'_>, tag_index: TagIndex, exn_ref: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult> { - gc::translate_exn_unbox(self, builder, tag_index, exn_ref) + gc::translate_exn_unbox(self, builder, tag_index, exn_ref, stacks) } pub fn translate_exn_throw( @@ -2828,8 +2874,9 @@ impl FuncEnvironment<'_> { tag_index: TagIndex, args: &[ir::Value], handlers: impl IntoIterator, Block)>, + stacks: &FuncTranslationStacks, ) -> WasmResult<()> { - gc::translate_exn_throw(self, builder, tag_index, args, handlers) + gc::translate_exn_throw(self, builder, tag_index, args, handlers, stacks) } pub fn translate_exn_throw_ref( @@ -2837,8 +2884,9 @@ impl FuncEnvironment<'_> { builder: &mut FunctionBuilder<'_>, exnref: ir::Value, handlers: impl IntoIterator, Block)>, + stacks: &FuncTranslationStacks, ) -> WasmResult<()> { - gc::translate_exn_throw_ref(self, builder, exnref, handlers) + gc::translate_exn_throw_ref(self, builder, exnref, handlers, stacks) } pub fn translate_array_new( @@ -2847,8 +2895,9 @@ impl FuncEnvironment<'_> { array_type_index: TypeIndex, elem: ir::Value, len: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult { - gc::translate_array_new(self, builder, array_type_index, elem, len) + gc::translate_array_new(self, builder, array_type_index, elem, len, stacks) } pub fn translate_array_new_default( @@ -2856,8 +2905,9 @@ impl FuncEnvironment<'_> { builder: &mut FunctionBuilder, array_type_index: TypeIndex, len: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult { - gc::translate_array_new_default(self, builder, array_type_index, len) + gc::translate_array_new_default(self, builder, array_type_index, len, stacks) } pub fn translate_array_new_fixed( @@ -2865,8 +2915,9 @@ impl FuncEnvironment<'_> { builder: &mut FunctionBuilder, array_type_index: TypeIndex, elems: &[ir::Value], + stacks: &FuncTranslationStacks, ) -> WasmResult { - gc::translate_array_new_fixed(self, builder, array_type_index, elems) + gc::translate_array_new_fixed(self, builder, array_type_index, elems, stacks) } pub fn 
translate_array_new_data( @@ -2941,8 +2992,18 @@ impl FuncEnvironment<'_> { index: ir::Value, value: ir::Value, len: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult<()> { - gc::translate_array_fill(self, builder, array_type_index, array, index, value, len) + gc::translate_array_fill( + self, + builder, + array_type_index, + array, + index, + value, + len, + stacks, + ) } pub fn translate_array_init_data( @@ -3013,8 +3074,9 @@ impl FuncEnvironment<'_> { &mut self, builder: &mut FunctionBuilder, array: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult { - gc::translate_array_len(self, builder, array) + gc::translate_array_len(self, builder, array, stacks) } pub fn translate_array_get( @@ -3024,8 +3086,17 @@ impl FuncEnvironment<'_> { array: ir::Value, index: ir::Value, extension: Option, + stacks: &FuncTranslationStacks, ) -> WasmResult { - gc::translate_array_get(self, builder, array_type_index, array, index, extension) + gc::translate_array_get( + self, + builder, + array_type_index, + array, + index, + extension, + stacks, + ) } pub fn translate_array_set( @@ -3035,8 +3106,9 @@ impl FuncEnvironment<'_> { array: ir::Value, index: ir::Value, value: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult<()> { - gc::translate_array_set(self, builder, array_type_index, array, index, value) + gc::translate_array_set(self, builder, array_type_index, array, index, value, stacks) } pub fn translate_ref_test( @@ -3045,8 +3117,9 @@ impl FuncEnvironment<'_> { test_ty: WasmRefType, gc_ref: ir::Value, gc_ref_ty: WasmRefType, + stacks: &FuncTranslationStacks, ) -> WasmResult { - gc::translate_ref_test(self, builder, test_ty, gc_ref, gc_ref_ty) + gc::translate_ref_test(self, builder, test_ty, gc_ref, gc_ref_ty, stacks) } pub fn translate_ref_null( @@ -3111,6 +3184,7 @@ impl FuncEnvironment<'_> { &mut self, builder: &mut FunctionBuilder<'_>, global_index: GlobalIndex, + stacks: &FuncTranslationStacks, ) -> WasmResult { match self.get_or_create_global(builder.func, global_index) { GlobalVariable::Memory { gv, offset, ty } => { @@ -3151,6 +3225,7 @@ impl FuncEnvironment<'_> { } else { ir::MemFlags::trusted().with_readonly().with_can_move() }, + stacks, ) } } @@ -3161,6 +3236,7 @@ impl FuncEnvironment<'_> { builder: &mut FunctionBuilder<'_>, global_index: GlobalIndex, val: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult<()> { match self.get_or_create_global(builder.func, global_index) { GlobalVariable::Memory { gv, offset, ty } => { @@ -3197,6 +3273,7 @@ impl FuncEnvironment<'_> { src, val, ir::MemFlags::trusted(), + stacks, )? } } @@ -3805,7 +3882,7 @@ impl FuncEnvironment<'_> { pub fn before_translate_function( &mut self, builder: &mut FunctionBuilder, - _state: &FuncTranslationStacks, + state: &FuncTranslationStacks, ) -> WasmResult<()> { // If an explicit stack limit is requested, emit one here at the start // of the function. @@ -3813,7 +3890,7 @@ impl FuncEnvironment<'_> { let limit = builder.ins().global_value(self.pointer_type(), gv); let sp = builder.ins().get_stack_pointer(self.pointer_type()); let overflow = builder.ins().icmp(IntCC::UnsignedLessThan, sp, limit); - self.conditionally_trap(builder, overflow, ir::TrapCode::STACK_OVERFLOW); + self.conditionally_trap(builder, overflow, ir::TrapCode::STACK_OVERFLOW, state); } // Additionally we initialize `fuel_var` if it will get used. 
@@ -4383,7 +4460,12 @@ impl FuncEnvironment<'_> { &*self.isa } - pub fn trap(&mut self, builder: &mut FunctionBuilder, trap: ir::TrapCode) { + pub fn trap( + &mut self, + builder: &mut FunctionBuilder, + trap: ir::TrapCode, + stacks: &FuncTranslationStacks, + ) { match ( self.clif_instruction_traps_enabled(), crate::clif_trap_to_env_trap(trap), @@ -4403,31 +4485,45 @@ impl FuncEnvironment<'_> { let trap_code = builder.ins().iconst(I8, i64::from(trap as u8)); builder.ins().call(libcall, &[vmctx, trap_code]); let raise = self.builtin_functions.raise(&mut builder.func); - builder.ins().call(raise, &[vmctx]); + let call = builder.ins().call(raise, &[vmctx]); + let tags = self.debug_tags(stacks, builder.srcloc()); + builder.func.debug_tags.set(call, tags); builder.ins().trap(TRAP_INTERNAL_ASSERT); } } } - pub fn trapz(&mut self, builder: &mut FunctionBuilder, value: ir::Value, trap: ir::TrapCode) { + pub fn trapz( + &mut self, + builder: &mut FunctionBuilder, + value: ir::Value, + trap: ir::TrapCode, + stacks: &FuncTranslationStacks, + ) { if self.clif_instruction_traps_enabled() { builder.ins().trapz(value, trap); } else { let ty = builder.func.dfg.value_type(value); let zero = builder.ins().iconst(ty, 0); let cmp = builder.ins().icmp(IntCC::Equal, value, zero); - self.conditionally_trap(builder, cmp, trap); + self.conditionally_trap(builder, cmp, trap, stacks); } } - pub fn trapnz(&mut self, builder: &mut FunctionBuilder, value: ir::Value, trap: ir::TrapCode) { + pub fn trapnz( + &mut self, + builder: &mut FunctionBuilder, + value: ir::Value, + trap: ir::TrapCode, + stacks: &FuncTranslationStacks, + ) { if self.clif_instruction_traps_enabled() { builder.ins().trapnz(value, trap); } else { let ty = builder.func.dfg.value_type(value); let zero = builder.ins().iconst(ty, 0); let cmp = builder.ins().icmp(IntCC::NotEqual, value, zero); - self.conditionally_trap(builder, cmp, trap); + self.conditionally_trap(builder, cmp, trap, stacks); } } @@ -4437,12 +4533,13 @@ impl FuncEnvironment<'_> { lhs: ir::Value, rhs: ir::Value, trap: ir::TrapCode, + stacks: &FuncTranslationStacks, ) -> ir::Value { if self.clif_instruction_traps_enabled() { builder.ins().uadd_overflow_trap(lhs, rhs, trap) } else { let (ret, overflow) = builder.ins().uadd_overflow(lhs, rhs); - self.conditionally_trap(builder, overflow, trap); + self.conditionally_trap(builder, overflow, trap, stacks); ret } } @@ -4452,8 +4549,9 @@ impl FuncEnvironment<'_> { builder: &mut FunctionBuilder, lhs: ir::Value, rhs: ir::Value, + stacks: &FuncTranslationStacks, ) -> ir::Value { - self.guard_signed_divide(builder, lhs, rhs); + self.guard_signed_divide(builder, lhs, rhs, stacks); builder.ins().sdiv(lhs, rhs) } @@ -4462,8 +4560,9 @@ impl FuncEnvironment<'_> { builder: &mut FunctionBuilder, lhs: ir::Value, rhs: ir::Value, + stacks: &FuncTranslationStacks, ) -> ir::Value { - self.guard_zero_divisor(builder, rhs); + self.guard_zero_divisor(builder, rhs, stacks); builder.ins().udiv(lhs, rhs) } @@ -4472,8 +4571,9 @@ impl FuncEnvironment<'_> { builder: &mut FunctionBuilder, lhs: ir::Value, rhs: ir::Value, + stacks: &FuncTranslationStacks, ) -> ir::Value { - self.guard_zero_divisor(builder, rhs); + self.guard_zero_divisor(builder, rhs, stacks); builder.ins().srem(lhs, rhs) } @@ -4482,8 +4582,9 @@ impl FuncEnvironment<'_> { builder: &mut FunctionBuilder, lhs: ir::Value, rhs: ir::Value, + stacks: &FuncTranslationStacks, ) -> ir::Value { - self.guard_zero_divisor(builder, rhs); + self.guard_zero_divisor(builder, rhs, stacks); builder.ins().urem(lhs, rhs) } @@ 
-4492,11 +4593,12 @@ impl FuncEnvironment<'_> { builder: &mut FunctionBuilder, ty: ir::Type, val: ir::Value, + stacks: &FuncTranslationStacks, ) -> ir::Value { // NB: for now avoid translating this entire instruction to CLIF and // just do it in a libcall. if !self.clif_instruction_traps_enabled() { - self.guard_fcvt_to_int(builder, ty, val, true); + self.guard_fcvt_to_int(builder, ty, val, true, stacks); } builder.ins().fcvt_to_sint(ty, val) } @@ -4506,9 +4608,10 @@ impl FuncEnvironment<'_> { builder: &mut FunctionBuilder, ty: ir::Type, val: ir::Value, + stacks: &FuncTranslationStacks, ) -> ir::Value { if !self.clif_instruction_traps_enabled() { - self.guard_fcvt_to_int(builder, ty, val, false); + self.guard_fcvt_to_int(builder, ty, val, false, stacks); } builder.ins().fcvt_to_uint(ty, val) } diff --git a/crates/cranelift/src/func_environ/gc.rs b/crates/cranelift/src/func_environ/gc.rs index 87cc59a212e3..5f1fb4593707 100644 --- a/crates/cranelift/src/func_environ/gc.rs +++ b/crates/cranelift/src/func_environ/gc.rs @@ -5,7 +5,7 @@ //! to have just a single `cfg(feature = "gc")` for the whole crate, which //! selects between these two implementations. -use crate::func_environ::FuncEnvironment; +use crate::{func_environ::FuncEnvironment, translate::FuncTranslationStacks}; use cranelift_codegen::ir; use cranelift_frontend::FunctionBuilder; use wasmtime_environ::{GcTypeLayouts, TagIndex, TypeIndex, WasmRefType, WasmResult}; @@ -52,6 +52,7 @@ pub trait GcCompiler { builder: &mut FunctionBuilder<'_>, array_type_index: TypeIndex, init: ArrayInit<'_>, + stacks: &FuncTranslationStacks, ) -> WasmResult; /// Emit code to allocate a new struct. @@ -65,6 +66,7 @@ pub trait GcCompiler { builder: &mut FunctionBuilder<'_>, struct_type_index: TypeIndex, fields: &[ir::Value], + stacks: &FuncTranslationStacks, ) -> WasmResult; /// Emit code to allocate a new exception object. 
@@ -83,6 +85,7 @@ pub trait GcCompiler { fields: &[ir::Value], instance_id: ir::Value, tag: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult; /// Emit a read barrier for when we are cloning a GC reference onto the Wasm @@ -124,6 +127,7 @@ pub trait GcCompiler { ty: WasmRefType, src: ir::Value, flags: ir::MemFlags, + stacks: &FuncTranslationStacks, ) -> WasmResult; /// Emit a write barrier for when we are writing a GC reference over another @@ -166,6 +170,7 @@ pub trait GcCompiler { dst: ir::Value, new_val: ir::Value, flags: ir::MemFlags, + stacks: &FuncTranslationStacks, ) -> WasmResult<()>; } diff --git a/crates/cranelift/src/func_environ/gc/disabled.rs b/crates/cranelift/src/func_environ/gc/disabled.rs index 9a0fb4e036ff..4ee24a8dab67 100644 --- a/crates/cranelift/src/func_environ/gc/disabled.rs +++ b/crates/cranelift/src/func_environ/gc/disabled.rs @@ -2,6 +2,7 @@ use super::GcCompiler; use crate::func_environ::{Extension, FuncEnvironment}; +use crate::translate::FuncTranslationStacks; use cranelift_codegen::ir; use cranelift_frontend::FunctionBuilder; use smallvec::SmallVec; @@ -24,6 +25,7 @@ pub fn translate_struct_new( _builder: &mut FunctionBuilder<'_>, _struct_type_index: TypeIndex, _fields: &[ir::Value], + _stacks: &FuncTranslationStacks, ) -> WasmResult { disabled() } @@ -32,6 +34,7 @@ pub fn translate_struct_new_default( _func_env: &mut FuncEnvironment<'_>, _builder: &mut FunctionBuilder<'_>, _struct_type_index: TypeIndex, + _stacks: &FuncTranslationStacks, ) -> WasmResult { disabled() } @@ -43,6 +46,7 @@ pub fn translate_struct_get( _field_index: u32, _struct_ref: ir::Value, _extension: Option, + _stacks: &FuncTranslationStacks, ) -> WasmResult { disabled() } @@ -54,6 +58,7 @@ pub fn translate_struct_set( _field_index: u32, _struct_ref: ir::Value, _new_val: ir::Value, + _stacks: &FuncTranslationStacks, ) -> WasmResult<()> { disabled() } @@ -63,6 +68,7 @@ pub fn translate_exn_unbox( _builder: &mut FunctionBuilder<'_>, _tag_index: TagIndex, _exn_ref: ir::Value, + _stacks: &FuncTranslationStacks, ) -> WasmResult> { disabled() } @@ -73,6 +79,7 @@ pub fn translate_exn_throw( _tag_index: TagIndex, _args: &[ir::Value], _handlers: impl IntoIterator, ir::Block)>, + _stacks: &FuncTranslationStacks, ) -> WasmResult<()> { disabled() } @@ -82,6 +89,7 @@ pub fn translate_exn_throw_ref( _builder: &mut FunctionBuilder<'_>, _exnref: ir::Value, _handlers: impl IntoIterator, ir::Block)>, + _stacks: &FuncTranslationStacks, ) -> WasmResult<()> { disabled() } @@ -92,6 +100,7 @@ pub fn translate_array_new( _array_type_index: TypeIndex, _elem: ir::Value, _len: ir::Value, + _stacks: &FuncTranslationStacks, ) -> WasmResult { disabled() } @@ -101,6 +110,7 @@ pub fn translate_array_new_default( _builder: &mut FunctionBuilder, _array_type_index: TypeIndex, _len: ir::Value, + _stacks: &FuncTranslationStacks, ) -> WasmResult { disabled() } @@ -110,6 +120,7 @@ pub fn translate_array_new_fixed( _builder: &mut FunctionBuilder, _array_type_index: TypeIndex, _elems: &[ir::Value], + _stacks: &FuncTranslationStacks, ) -> WasmResult { disabled() } @@ -122,6 +133,7 @@ pub fn translate_array_fill( _index: ir::Value, _value: ir::Value, _n: ir::Value, + _stacks: &FuncTranslationStacks, ) -> WasmResult<()> { disabled() } @@ -130,6 +142,7 @@ pub fn translate_array_len( _func_env: &mut FuncEnvironment<'_>, _builder: &mut FunctionBuilder, _array: ir::Value, + _stacks: &FuncTranslationStacks, ) -> WasmResult { disabled() } @@ -141,6 +154,7 @@ pub fn translate_array_get( _array: ir::Value, _index: ir::Value, 
_extension: Option, + _stacks: &FuncTranslationStacks, ) -> WasmResult { disabled() } @@ -152,6 +166,7 @@ pub fn translate_array_set( _array: ir::Value, _index: ir::Value, _value: ir::Value, + _stacks: &FuncTranslationStacks, ) -> WasmResult<()> { disabled() } @@ -162,6 +177,7 @@ pub fn translate_ref_test( _test_ty: WasmRefType, _val: ir::Value, _val_ty: WasmRefType, + _stacks: &FuncTranslationStacks, ) -> WasmResult { disabled() } diff --git a/crates/cranelift/src/func_environ/gc/enabled.rs b/crates/cranelift/src/func_environ/gc/enabled.rs index dc780f5e2aa9..68016347e537 100644 --- a/crates/cranelift/src/func_environ/gc/enabled.rs +++ b/crates/cranelift/src/func_environ/gc/enabled.rs @@ -1,7 +1,7 @@ use super::{ArrayInit, GcCompiler}; use crate::bounds_checks::BoundsCheck; use crate::func_environ::{Extension, FuncEnvironment}; -use crate::translate::{Heap, HeapData, StructFieldsVec, TargetEnvironment}; +use crate::translate::{FuncTranslationStacks, Heap, HeapData, StructFieldsVec, TargetEnvironment}; use crate::{Reachability, TRAP_INTERNAL_ASSERT}; use cranelift_codegen::ir::immediates::Offset32; use cranelift_codegen::ir::{ @@ -106,6 +106,7 @@ fn read_field_at_addr( ty: WasmStorageType, addr: ir::Value, extension: Option, + stacks: &FuncTranslationStacks, ) -> WasmResult { assert_eq!(extension.is_none(), matches!(ty, WasmStorageType::Val(_))); assert_eq!( @@ -128,7 +129,7 @@ fn read_field_at_addr( WasmValType::Ref(r) => match r.heap_type.top() { WasmHeapTopType::Any | WasmHeapTopType::Extern | WasmHeapTopType::Exn => { gc_compiler(func_env)? - .translate_read_gc_reference(func_env, builder, r, addr, flags)? + .translate_read_gc_reference(func_env, builder, r, addr, flags, stacks)? } WasmHeapTopType::Func => { let expected_ty = match r.heap_type { @@ -233,6 +234,7 @@ fn write_field_at_addr( field_ty: WasmStorageType, field_addr: ir::Value, new_val: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult<()> { // Data inside GC objects is always little endian. let flags = ir::MemFlags::trusted().with_endianness(ir::Endianness::Little); @@ -248,8 +250,9 @@ fn write_field_at_addr( write_func_ref_at_addr(func_env, builder, r, flags, field_addr, new_val)?; } WasmStorageType::Val(WasmValType::Ref(r)) => { - gc_compiler(func_env)? 
- .translate_write_gc_reference(func_env, builder, r, field_addr, new_val, flags)?; + gc_compiler(func_env)?.translate_write_gc_reference( + func_env, builder, r, field_addr, new_val, flags, stacks, + )?; } WasmStorageType::Val(_) => { assert_eq!( @@ -267,8 +270,9 @@ pub fn translate_struct_new( builder: &mut FunctionBuilder<'_>, struct_type_index: TypeIndex, fields: &[ir::Value], + stacks: &FuncTranslationStacks, ) -> WasmResult { - gc_compiler(func_env)?.alloc_struct(func_env, builder, struct_type_index, &fields) + gc_compiler(func_env)?.alloc_struct(func_env, builder, struct_type_index, &fields, stacks) } fn default_value( @@ -304,6 +308,7 @@ pub fn translate_struct_new_default( func_env: &mut FuncEnvironment<'_>, builder: &mut FunctionBuilder<'_>, struct_type_index: TypeIndex, + stacks: &FuncTranslationStacks, ) -> WasmResult { let interned_ty = func_env.module.types[struct_type_index].unwrap_module_type_index(); let struct_ty = func_env.types.unwrap_struct(interned_ty)?; @@ -312,7 +317,7 @@ pub fn translate_struct_new_default( .iter() .map(|f| default_value(&mut builder.cursor(), func_env, &f.element_type)) .collect::(); - gc_compiler(func_env)?.alloc_struct(func_env, builder, struct_type_index, &fields) + gc_compiler(func_env)?.alloc_struct(func_env, builder, struct_type_index, &fields, stacks) } pub fn translate_struct_get( @@ -322,6 +327,7 @@ pub fn translate_struct_get( field_index: u32, struct_ref: ir::Value, extension: Option, + stacks: &FuncTranslationStacks, ) -> WasmResult { log::trace!( "translate_struct_get({struct_type_index:?}, {field_index:?}, {struct_ref:?}, {extension:?})" @@ -330,7 +336,7 @@ pub fn translate_struct_get( // TODO: If we know we have a `(ref $my_struct)` here, instead of maybe a // `(ref null $my_struct)`, we could omit the `trapz`. But plumbing that // type info from `wasmparser` and through to here is a bit funky. - func_env.trapz(builder, struct_ref, crate::TRAP_NULL_REFERENCE); + func_env.trapz(builder, struct_ref, crate::TRAP_NULL_REFERENCE, stacks); let field_index = usize::try_from(field_index).unwrap(); let interned_type_index = func_env.module.types[struct_type_index].unwrap_module_type_index(); @@ -351,6 +357,7 @@ pub fn translate_struct_get( access_size: u8::try_from(field_size).unwrap(), object_size: struct_size, }, + stacks, ); let result = read_field_at_addr( @@ -359,6 +366,7 @@ pub fn translate_struct_get( field_ty.element_type, field_addr, extension, + stacks, ); log::trace!("translate_struct_get(..) -> {result:?}"); result @@ -371,13 +379,14 @@ pub fn translate_struct_set( field_index: u32, struct_ref: ir::Value, new_val: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult<()> { log::trace!( "translate_struct_set({struct_type_index:?}, {field_index:?}, struct_ref: {struct_ref:?}, new_val: {new_val:?})" ); // TODO: See comment in `translate_struct_get` about the `trapz`. 
- func_env.trapz(builder, struct_ref, crate::TRAP_NULL_REFERENCE); + func_env.trapz(builder, struct_ref, crate::TRAP_NULL_REFERENCE, stacks); let field_index = usize::try_from(field_index).unwrap(); let interned_type_index = func_env.module.types[struct_type_index].unwrap_module_type_index(); @@ -398,6 +407,7 @@ pub fn translate_struct_set( access_size: u8::try_from(field_size).unwrap(), object_size: struct_size, }, + stacks, ); write_field_at_addr( @@ -406,6 +416,7 @@ pub fn translate_struct_set( field_ty.element_type, field_addr, new_val, + stacks, )?; log::trace!("translate_struct_set: finished"); @@ -417,6 +428,7 @@ pub fn translate_exn_unbox( builder: &mut FunctionBuilder<'_>, tag_index: TagIndex, exn_ref: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult> { log::trace!("translate_exn_unbox({tag_index:?}, {exn_ref:?})"); @@ -453,9 +465,10 @@ pub fn translate_exn_unbox( access_size: u8::try_from(field_size).unwrap(), object_size: exn_size, }, + stacks, ); - let value = read_field_at_addr(func_env, builder, field_ty, field_addr, None)?; + let value = read_field_at_addr(func_env, builder, field_ty, field_addr, None, stacks)?; result.push(value); } @@ -469,6 +482,7 @@ pub fn translate_exn_throw( tag_index: TagIndex, args: &[ir::Value], handlers: impl IntoIterator, Block)>, + stacks: &FuncTranslationStacks, ) -> WasmResult<()> { let (instance_id, defined_tag_id) = func_env.get_instance_and_tag(builder, tag_index); let exnref = gc_compiler(func_env)?.alloc_exn( @@ -478,8 +492,9 @@ pub fn translate_exn_throw( args, instance_id, defined_tag_id, + stacks, )?; - translate_exn_throw_ref(func_env, builder, exnref, handlers) + translate_exn_throw_ref(func_env, builder, exnref, handlers, stacks) } pub fn translate_exn_throw_ref( @@ -487,6 +502,7 @@ pub fn translate_exn_throw_ref( builder: &mut FunctionBuilder<'_>, exnref: ir::Value, handlers: impl IntoIterator, Block)>, + stacks: &FuncTranslationStacks, ) -> WasmResult<()> { let builtin = func_env.builtin_functions.throw_ref(builder.func); let sig = builder.func.dfg.ext_funcs[builtin].signature; @@ -520,7 +536,7 @@ pub fn translate_exn_throw_ref( builder.switch_to_block(continuation); builder.seal_block(continuation); - func_env.trap(builder, crate::TRAP_UNREACHABLE); + func_env.trap(builder, crate::TRAP_UNREACHABLE, stacks); Ok(()) } @@ -531,6 +547,7 @@ pub fn translate_array_new( array_type_index: TypeIndex, elem: ir::Value, len: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult { log::trace!("translate_array_new({array_type_index:?}, {elem:?}, {len:?})"); let result = gc_compiler(func_env)?.alloc_array( @@ -538,6 +555,7 @@ pub fn translate_array_new( builder, array_type_index, ArrayInit::Fill { elem, len }, + stacks, )?; log::trace!("translate_array_new(..) -> {result:?}"); Ok(result) @@ -548,6 +566,7 @@ pub fn translate_array_new_default( builder: &mut FunctionBuilder, array_type_index: TypeIndex, len: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult { log::trace!("translate_array_new_default({array_type_index:?}, {len:?})"); @@ -559,6 +578,7 @@ pub fn translate_array_new_default( builder, array_type_index, ArrayInit::Fill { elem, len }, + stacks, )?; log::trace!("translate_array_new_default(..) 
-> {result:?}"); Ok(result) @@ -569,6 +589,7 @@ pub fn translate_array_new_fixed( builder: &mut FunctionBuilder, array_type_index: TypeIndex, elems: &[ir::Value], + stacks: &FuncTranslationStacks, ) -> WasmResult { log::trace!("translate_array_new_fixed({array_type_index:?}, {elems:?})"); let result = gc_compiler(func_env)?.alloc_array( @@ -576,6 +597,7 @@ pub fn translate_array_new_fixed( builder, array_type_index, ArrayInit::Elems(elems), + stacks, )?; log::trace!("translate_array_new_fixed(..) -> {result:?}"); Ok(result) @@ -757,19 +779,26 @@ pub fn translate_array_fill( index: ir::Value, value: ir::Value, n: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult<()> { log::trace!( "translate_array_fill({array_type_index:?}, {array_ref:?}, {index:?}, {value:?}, {n:?})" ); - let len = translate_array_len(func_env, builder, array_ref)?; + let len = translate_array_len(func_env, builder, array_ref, stacks)?; // Check that the full range of elements we want to fill is within bounds. - let end_index = func_env.uadd_overflow_trap(builder, index, n, crate::TRAP_ARRAY_OUT_OF_BOUNDS); + let end_index = + func_env.uadd_overflow_trap(builder, index, n, crate::TRAP_ARRAY_OUT_OF_BOUNDS, stacks); let out_of_bounds = builder .ins() .icmp(IntCC::UnsignedGreaterThan, end_index, len); - func_env.trapnz(builder, out_of_bounds, crate::TRAP_ARRAY_OUT_OF_BOUNDS); + func_env.trapnz( + builder, + out_of_bounds, + crate::TRAP_ARRAY_OUT_OF_BOUNDS, + stacks, + ); // Get the address of the first element we want to fill. let interned_type_index = func_env.module.types[array_type_index].unwrap_module_type_index(); @@ -787,6 +816,7 @@ pub fn translate_array_fill( offset: obj_offset, object_size: obj_size, }, + stacks, ); // Calculate the end address, just after the filled region. @@ -809,7 +839,7 @@ pub fn translate_array_fill( .unwrap_array(interned_type_index)? .0 .element_type; - write_field_at_addr(func_env, builder, elem_ty, elem_addr, value) + write_field_at_addr(func_env, builder, elem_ty, elem_addr, value, stacks) }, ); log::trace!("translate_array_fill(..) -> {result:?}"); @@ -820,10 +850,11 @@ pub fn translate_array_len( func_env: &mut FuncEnvironment<'_>, builder: &mut FunctionBuilder, array_ref: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult { log::trace!("translate_array_len({array_ref:?})"); - func_env.trapz(builder, array_ref, crate::TRAP_NULL_REFERENCE); + func_env.trapz(builder, array_ref, crate::TRAP_NULL_REFERENCE, stacks); let len_offset = gc_compiler(func_env)?.layouts().array_length_field_offset(); let len_field = func_env.prepare_gc_ref_access( @@ -835,6 +866,7 @@ pub fn translate_array_len( offset: len_offset, access_size: u8::try_from(ir::types::I32.bytes()).unwrap(), }, + stacks, ); let result = builder.ins().load( ir::types::I32, @@ -915,6 +947,7 @@ fn array_elem_addr( array_type_index: ModuleInternedTypeIndex, array_ref: ir::Value, index: ir::Value, + stacks: &FuncTranslationStacks, ) -> ir::Value { // First, assert that `index < array.length`. // @@ -926,10 +959,10 @@ fn array_elem_addr( // code in `bounds_check.rs` to implement these bounds checks. That is all // planned, but not yet implemented. 
- let len = translate_array_len(func_env, builder, array_ref).unwrap(); + let len = translate_array_len(func_env, builder, array_ref, stacks).unwrap(); let in_bounds = builder.ins().icmp(IntCC::UnsignedLessThan, index, len); - func_env.trapz(builder, in_bounds, crate::TRAP_ARRAY_OUT_OF_BOUNDS); + func_env.trapz(builder, in_bounds, crate::TRAP_ARRAY_OUT_OF_BOUNDS, stacks); // Compute the size (in bytes) of the whole array object. let ArraySizeInfo { @@ -969,6 +1002,7 @@ fn array_elem_addr( offset: offset_in_array, object_size: obj_size, }, + stacks, ) } @@ -979,16 +1013,24 @@ pub fn translate_array_get( array_ref: ir::Value, index: ir::Value, extension: Option, + stacks: &FuncTranslationStacks, ) -> WasmResult { log::trace!("translate_array_get({array_type_index:?}, {array_ref:?}, {index:?})"); let array_type_index = func_env.module.types[array_type_index].unwrap_module_type_index(); - let elem_addr = array_elem_addr(func_env, builder, array_type_index, array_ref, index); + let elem_addr = array_elem_addr( + func_env, + builder, + array_type_index, + array_ref, + index, + stacks, + ); let array_ty = func_env.types.unwrap_array(array_type_index)?; let elem_ty = array_ty.0.element_type; - let result = read_field_at_addr(func_env, builder, elem_ty, elem_addr, extension)?; + let result = read_field_at_addr(func_env, builder, elem_ty, elem_addr, extension, stacks)?; log::trace!("translate_array_get(..) -> {result:?}"); Ok(result) } @@ -1000,16 +1042,24 @@ pub fn translate_array_set( array_ref: ir::Value, index: ir::Value, value: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult<()> { log::trace!("translate_array_set({array_type_index:?}, {array_ref:?}, {index:?}, {value:?})"); let array_type_index = func_env.module.types[array_type_index].unwrap_module_type_index(); - let elem_addr = array_elem_addr(func_env, builder, array_type_index, array_ref, index); + let elem_addr = array_elem_addr( + func_env, + builder, + array_type_index, + array_ref, + index, + stacks, + ); let array_ty = func_env.types.unwrap_array(array_type_index)?; let elem_ty = array_ty.0.element_type; - write_field_at_addr(func_env, builder, elem_ty, elem_addr, value)?; + write_field_at_addr(func_env, builder, elem_ty, elem_addr, value, stacks)?; log::trace!("translate_array_set: finished"); Ok(()) @@ -1021,6 +1071,7 @@ pub fn translate_ref_test( test_ty: WasmRefType, val: ir::Value, val_ty: WasmRefType, + stacks: &FuncTranslationStacks, ) -> WasmResult { log::trace!("translate_ref_test({test_ty:?}, {val:?})"); @@ -1147,6 +1198,7 @@ pub fn translate_ref_test( access_size: wasmtime_environ::VM_GC_KIND_SIZE, object_size: wasmtime_environ::VM_GC_HEADER_SIZE, }, + stacks, ); let actual_kind = builder.ins().load( ir::types::I32, @@ -1204,6 +1256,7 @@ pub fn translate_ref_test( offset: wasmtime_environ::VM_GC_HEADER_TYPE_INDEX_OFFSET, access_size: func_env.offsets.size_of_vmshared_type_index(), }, + stacks, ); let actual_shared_ty = builder.ins().load( ir::types::I32, @@ -1278,6 +1331,7 @@ fn emit_array_size( builder: &mut FunctionBuilder<'_>, array_layout: &GcArrayLayout, len: ir::Value, + stacks: &FuncTranslationStacks, ) -> ir::Value { let base_size = builder .ins() @@ -1300,7 +1354,7 @@ fn emit_array_size( .ins() .imul_imm(len, i64::from(array_layout.elem_size)); let high_bits = builder.ins().ushr_imm(elems_size_64, 32); - func_env.trapnz(builder, high_bits, crate::TRAP_ALLOCATION_TOO_LARGE); + func_env.trapnz(builder, high_bits, crate::TRAP_ALLOCATION_TOO_LARGE, stacks); let elems_size = 
builder.ins().ireduce(ir::types::I32, elems_size_64); // And if adding the base size and elements size overflows, then the @@ -1310,6 +1364,7 @@ fn emit_array_size( base_size, elems_size, crate::TRAP_ALLOCATION_TOO_LARGE, + stacks, ); size @@ -1486,6 +1541,7 @@ impl FuncEnvironment<'_> { builder: &mut FunctionBuilder, gc_ref: ir::Value, bounds_check: BoundsCheck, + stacks: &FuncTranslationStacks, ) -> ir::Value { log::trace!("prepare_gc_ref_access({gc_ref:?}, {bounds_check:?})"); assert_eq!(builder.func.dfg.value_type(gc_ref), ir::types::I32); @@ -1499,6 +1555,7 @@ impl FuncEnvironment<'_> { gc_ref, bounds_check, crate::TRAP_INTERNAL_ASSERT, + stacks, ) { Reachability::Reachable(v) => v, Reachability::Unreachable => { diff --git a/crates/cranelift/src/func_environ/gc/enabled/drc.rs b/crates/cranelift/src/func_environ/gc/enabled/drc.rs index d0abb2ee53b6..4422853b67c6 100644 --- a/crates/cranelift/src/func_environ/gc/enabled/drc.rs +++ b/crates/cranelift/src/func_environ/gc/enabled/drc.rs @@ -2,7 +2,7 @@ //! barriers. use super::*; -use crate::translate::TargetEnvironment; +use crate::translate::{FuncTranslationStacks, TargetEnvironment}; use crate::{TRAP_INTERNAL_ASSERT, func_environ::FuncEnvironment}; use cranelift_codegen::ir::condcodes::IntCC; use cranelift_codegen::ir::{self, InstBuilder}; @@ -28,6 +28,7 @@ impl DrcCompiler { func_env: &mut FuncEnvironment<'_>, builder: &mut FunctionBuilder, gc_ref: ir::Value, + stacks: &FuncTranslationStacks, ) -> ir::Value { let offset = func_env.offsets.vm_drc_header_ref_count(); let pointer = func_env.prepare_gc_ref_access( @@ -37,6 +38,7 @@ impl DrcCompiler { offset, access_size: u8::try_from(ir::types::I64.bytes()).unwrap(), }, + stacks, ); builder .ins() @@ -53,6 +55,7 @@ impl DrcCompiler { builder: &mut FunctionBuilder, gc_ref: ir::Value, new_ref_count: ir::Value, + stacks: &FuncTranslationStacks, ) { let offset = func_env.offsets.vm_drc_header_ref_count(); let pointer = func_env.prepare_gc_ref_access( @@ -62,6 +65,7 @@ impl DrcCompiler { offset, access_size: u8::try_from(ir::types::I64.bytes()).unwrap(), }, + stacks, ); builder .ins() @@ -80,11 +84,12 @@ impl DrcCompiler { builder: &mut FunctionBuilder, gc_ref: ir::Value, delta: i64, + stacks: &FuncTranslationStacks, ) -> ir::Value { debug_assert!(delta == -1 || delta == 1); - let old_ref_count = self.load_ref_count(func_env, builder, gc_ref); + let old_ref_count = self.load_ref_count(func_env, builder, gc_ref, stacks); let new_ref_count = builder.ins().iadd_imm(old_ref_count, delta); - self.store_ref_count(func_env, builder, gc_ref, new_ref_count); + self.store_ref_count(func_env, builder, gc_ref, new_ref_count, stacks); new_ref_count } @@ -99,6 +104,7 @@ impl DrcCompiler { builder: &mut FunctionBuilder<'_>, gc_ref: ir::Value, reserved: ir::Value, + stacks: &FuncTranslationStacks, ) { debug_assert_eq!(builder.func.dfg.value_type(gc_ref), ir::types::I32); debug_assert_eq!(builder.func.dfg.value_type(reserved), ir::types::I32); @@ -112,11 +118,11 @@ impl DrcCompiler { .load(ir::types::I32, ir::MemFlags::trusted(), head, 0); // Update our object's header to point to `next` and consider itself part of the list. 
- self.set_next_over_approximated_stack_root(func_env, builder, gc_ref, next); - self.set_in_over_approximated_stack_roots_bit(func_env, builder, gc_ref, reserved); + self.set_next_over_approximated_stack_root(func_env, builder, gc_ref, next, stacks); + self.set_in_over_approximated_stack_roots_bit(func_env, builder, gc_ref, reserved, stacks); // Increment our ref count because the list is logically holding a strong reference. - self.mutate_ref_count(func_env, builder, gc_ref, 1); + self.mutate_ref_count(func_env, builder, gc_ref, 1, stacks); // Commit this object as the new head of the list. builder @@ -149,6 +155,7 @@ impl DrcCompiler { builder: &mut FunctionBuilder<'_>, gc_ref: ir::Value, next: ir::Value, + stacks: &FuncTranslationStacks, ) { debug_assert_eq!(builder.func.dfg.value_type(gc_ref), ir::types::I32); debug_assert_eq!(builder.func.dfg.value_type(next), ir::types::I32); @@ -161,6 +168,7 @@ impl DrcCompiler { .vm_drc_header_next_over_approximated_stack_root(), access_size: u8::try_from(ir::types::I32.bytes()).unwrap(), }, + stacks, ); builder.ins().store(ir::MemFlags::trusted(), next, ptr, 0); } @@ -173,13 +181,14 @@ impl DrcCompiler { builder: &mut FunctionBuilder<'_>, gc_ref: ir::Value, old_reserved_bits: ir::Value, + stacks: &FuncTranslationStacks, ) { let in_set_bit = builder.ins().iconst( ir::types::I32, i64::from(wasmtime_environ::drc::HEADER_IN_OVER_APPROX_LIST_BIT), ); let new_reserved = builder.ins().bor(old_reserved_bits, in_set_bit); - self.set_reserved_bits(func_env, builder, gc_ref, new_reserved); + self.set_reserved_bits(func_env, builder, gc_ref, new_reserved, stacks); } /// Update the reserved bits in a `VMDrcHeader`. @@ -189,6 +198,7 @@ impl DrcCompiler { builder: &mut FunctionBuilder<'_>, gc_ref: ir::Value, new_reserved: ir::Value, + stacks: &FuncTranslationStacks, ) { let ptr = func_env.prepare_gc_ref_access( builder, @@ -197,6 +207,7 @@ impl DrcCompiler { offset: func_env.offsets.vm_gc_header_reserved_bits(), access_size: u8::try_from(ir::types::I32.bytes()).unwrap(), }, + stacks, ); builder .ins() @@ -211,6 +222,7 @@ impl DrcCompiler { field_addr: ir::Value, ty: WasmStorageType, val: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult<()> { // Data inside GC objects is always little endian. 
let flags = ir::MemFlags::trusted().with_endianness(ir::Endianness::Little); @@ -222,7 +234,9 @@ impl DrcCompiler { write_func_ref_at_addr(func_env, builder, r, flags, field_addr, val)?; } WasmStorageType::Val(WasmValType::Ref(r)) => { - self.translate_init_gc_reference(func_env, builder, r, field_addr, val, flags)?; + self.translate_init_gc_reference( + func_env, builder, r, field_addr, val, flags, stacks, + )?; } WasmStorageType::I8 => { assert_eq!(builder.func.dfg.value_type(val), ir::types::I32); @@ -259,6 +273,7 @@ impl DrcCompiler { dst: ir::Value, new_val: ir::Value, flags: ir::MemFlags, + stacks: &FuncTranslationStacks, ) -> WasmResult<()> { let (ref_ty, needs_stack_map) = func_env.reference_type(ty.heap_type); debug_assert!(needs_stack_map); @@ -331,7 +346,7 @@ impl DrcCompiler { builder.switch_to_block(inc_ref_block); builder.seal_block(inc_ref_block); log::trace!("DRC initialization barrier: increment the ref count of the initial value"); - self.mutate_ref_count(func_env, builder, new_val, 1); + self.mutate_ref_count(func_env, builder, new_val, 1, stacks); builder.ins().jump(continue_block, &[]); // Join point after we're done with the GC barrier: do the actual store @@ -388,6 +403,7 @@ impl GcCompiler for DrcCompiler { builder: &mut FunctionBuilder<'_>, array_type_index: TypeIndex, init: super::ArrayInit<'_>, + stacks: &FuncTranslationStacks, ) -> WasmResult { let interned_type_index = func_env.module.types[array_type_index].unwrap_module_type_index(); @@ -402,7 +418,7 @@ impl GcCompiler for DrcCompiler { // First, compute the array's total size from its base size, element // size, and length. let len = init.len(&mut builder.cursor()); - let size = emit_array_size(func_env, builder, &array_layout, len); + let size = emit_array_size(func_env, builder, &array_layout, len, stacks); // Second, now that we have the array object's total size, call the // `gc_alloc_raw` builtin libcall to allocate the array. 
@@ -440,7 +456,7 @@ impl GcCompiler for DrcCompiler { size, elems_addr, |func_env, builder, elem_ty, elem_addr, val| { - self.init_field(func_env, builder, elem_addr, elem_ty, val) + self.init_field(func_env, builder, elem_addr, elem_ty, val, stacks) }, )?; Ok(array_ref) @@ -452,6 +468,7 @@ impl GcCompiler for DrcCompiler { builder: &mut FunctionBuilder<'_>, struct_type_index: TypeIndex, field_vals: &[ir::Value], + stacks: &FuncTranslationStacks, ) -> WasmResult { let interned_type_index = func_env.module.types[struct_type_index].unwrap_module_type_index(); @@ -489,7 +506,7 @@ impl GcCompiler for DrcCompiler { raw_ptr_to_struct, field_vals, |func_env, builder, ty, field_addr, val| { - self.init_field(func_env, builder, field_addr, ty, val) + self.init_field(func_env, builder, field_addr, ty, val, stacks) }, )?; @@ -504,6 +521,7 @@ impl GcCompiler for DrcCompiler { field_vals: &[ir::Value], instance_id: ir::Value, tag: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult { let interned_type_index = func_env.module.tags[tag_index] .exception @@ -543,7 +561,7 @@ impl GcCompiler for DrcCompiler { raw_ptr_to_exn, field_vals, |func_env, builder, ty, field_addr, val| { - self.init_field(func_env, builder, field_addr, ty, val) + self.init_field(func_env, builder, field_addr, ty, val, stacks) }, )?; @@ -557,6 +575,7 @@ impl GcCompiler for DrcCompiler { instance_id_addr, WasmStorageType::Val(WasmValType::I32), instance_id, + stacks, )?; let tag_addr = builder .ins() @@ -567,6 +586,7 @@ impl GcCompiler for DrcCompiler { tag_addr, WasmStorageType::Val(WasmValType::I32), tag, + stacks, )?; Ok(exn_ref) @@ -579,6 +599,7 @@ impl GcCompiler for DrcCompiler { ty: WasmRefType, src: ir::Value, flags: ir::MemFlags, + stacks: &FuncTranslationStacks, ) -> WasmResult { log::trace!("translate_read_gc_reference({ty:?}, {src:?}, {flags:?})"); @@ -695,6 +716,7 @@ impl GcCompiler for DrcCompiler { offset: func_env.offsets.vm_gc_header_reserved_bits(), access_size: u8::try_from(ir::types::I32.bytes()).unwrap(), }, + stacks, ); let reserved = builder .ins() @@ -715,7 +737,7 @@ impl GcCompiler for DrcCompiler { log::trace!( "DRC read barrier: push the object onto the over-approximated-stack-roots list" ); - self.push_onto_over_approximated_stack_roots(func_env, builder, gc_ref, reserved); + self.push_onto_over_approximated_stack_roots(func_env, builder, gc_ref, reserved, stacks); builder.ins().jump(continue_block, &[]); // Join point after we're done with the GC barrier. 
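The DRC helpers being threaded through here (`load_ref_count`, `store_ref_count`, `mutate_ref_count`) implement plain reference-count bookkeeping around GC reference writes: bump the new referent's count, do the store, then drop the old referent's count and flag it for reclamation if that count hits zero. A simplified sketch of that shape in ordinary Rust (a toy heap with indices standing in for GC references; it ignores the over-approximated stack-roots list, and none of these names are Wasmtime APIs):

```rust
/// Toy object header holding only a reference count.
struct Header {
    ref_count: u64,
}

/// Overwrite `*slot` with `new_val`, keeping counts balanced. Returns the
/// index of an object whose count reached zero so the caller can reclaim it.
fn write_gc_ref_barrier(
    heap: &mut [Header],
    slot: &mut Option<usize>,
    new_val: Option<usize>,
) -> Option<usize> {
    if let Some(new_idx) = new_val {
        heap[new_idx].ref_count += 1; // "increment new ref's ref count"
    }
    let old_val = std::mem::replace(slot, new_val); // the actual store
    if let Some(old_idx) = old_val {
        heap[old_idx].ref_count -= 1; // "decrement old ref's ref count"
        if heap[old_idx].ref_count == 0 {
            return Some(old_idx);
        }
    }
    None
}
```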
@@ -733,6 +755,7 @@ impl GcCompiler for DrcCompiler { dst: ir::Value, new_val: ir::Value, flags: ir::MemFlags, + stacks: &FuncTranslationStacks, ) -> WasmResult<()> { assert!(ty.is_vmgcref_type()); @@ -859,7 +882,7 @@ impl GcCompiler for DrcCompiler { builder.switch_to_block(inc_ref_block); log::trace!("DRC write barrier: increment new ref's ref count"); builder.seal_block(inc_ref_block); - self.mutate_ref_count(func_env, builder, new_val, 1); + self.mutate_ref_count(func_env, builder, new_val, 1, stacks); builder.ins().jump(check_old_val_block, &[]); // Block to store the new value into `dst` and then check whether the @@ -885,7 +908,7 @@ impl GcCompiler for DrcCompiler { log::trace!( "DRC write barrier: decrement old ref's ref count and check for zero ref count" ); - let ref_count = self.load_ref_count(func_env, builder, old_val); + let ref_count = self.load_ref_count(func_env, builder, old_val, stacks); let new_ref_count = builder.ins().iadd_imm(ref_count, -1); let old_val_needs_drop = builder.ins().icmp_imm(IntCC::Equal, new_ref_count, 0); builder.ins().brif( @@ -915,7 +938,7 @@ impl GcCompiler for DrcCompiler { builder.switch_to_block(store_dec_ref_block); builder.seal_block(store_dec_ref_block); log::trace!("DRC write barrier: store decremented ref count into old ref"); - self.store_ref_count(func_env, builder, old_val, new_ref_count); + self.store_ref_count(func_env, builder, old_val, new_ref_count, stacks); builder.ins().jump(continue_block, &[]); // Join point after we're done with the GC barrier. diff --git a/crates/cranelift/src/func_environ/gc/enabled/null.rs b/crates/cranelift/src/func_environ/gc/enabled/null.rs index a0e938d55c0f..d8d6df126528 100644 --- a/crates/cranelift/src/func_environ/gc/enabled/null.rs +++ b/crates/cranelift/src/func_environ/gc/enabled/null.rs @@ -6,6 +6,7 @@ use super::*; use crate::func_environ::FuncEnvironment; +use crate::translate::FuncTranslationStacks; use cranelift_codegen::ir::{self, InstBuilder}; use cranelift_frontend::FunctionBuilder; use wasmtime_environ::VMSharedTypeIndex; @@ -44,6 +45,7 @@ impl NullCompiler { ty: Option, size: ir::Value, align: ir::Value, + stacks: &FuncTranslationStacks, ) -> (ir::Value, ir::Value) { log::trace!("emit_inline_alloc(kind={kind:?}, ty={ty:?}, size={size}, align={align})"); @@ -64,7 +66,7 @@ impl NullCompiler { .ins() .iconst(ir::types::I32, i64::from(VMGcKind::MASK)); let masked = builder.ins().band(size, mask); - func_env.trapnz(builder, masked, crate::TRAP_ALLOCATION_TOO_LARGE); + func_env.trapnz(builder, masked, crate::TRAP_ALLOCATION_TOO_LARGE, stacks); // Load the bump "pointer" (it is actually an index into the GC heap, // not a raw pointer). @@ -94,6 +96,7 @@ impl NullCompiler { next, align_minus_one, crate::TRAP_ALLOCATION_TOO_LARGE, + stacks, ); let not_align_minus_one = builder.ins().bnot(align_minus_one); let aligned = builder @@ -101,8 +104,13 @@ impl NullCompiler { .band(next_plus_align_minus_one, not_align_minus_one); // Check whether the allocation fits in the heap space we have left. 
- let end_of_object = - func_env.uadd_overflow_trap(builder, aligned, size, crate::TRAP_ALLOCATION_TOO_LARGE); + let end_of_object = func_env.uadd_overflow_trap( + builder, + aligned, + size, + crate::TRAP_ALLOCATION_TOO_LARGE, + stacks, + ); let uext_end_of_object = uextend_i32_to_pointer_type(builder, pointer_type, end_of_object); let bound = func_env.get_gc_heap_bound(builder); let is_in_bounds = builder.ins().icmp( @@ -187,6 +195,7 @@ impl GcCompiler for NullCompiler { builder: &mut FunctionBuilder<'_>, array_type_index: TypeIndex, init: super::ArrayInit<'_>, + stacks: &FuncTranslationStacks, ) -> WasmResult { let interned_type_index = func_env.module.types[array_type_index].unwrap_module_type_index(); @@ -201,7 +210,7 @@ impl GcCompiler for NullCompiler { // First, compute the array's total size from its base size, element // size, and length. let len = init.len(&mut builder.cursor()); - let size = emit_array_size(func_env, builder, &array_layout, len); + let size = emit_array_size(func_env, builder, &array_layout, len, stacks); // Next, allocate the array. assert!(align.is_power_of_two()); @@ -213,6 +222,7 @@ impl GcCompiler for NullCompiler { Some(interned_type_index), size, align, + stacks, ); // Write the array's length into its field. @@ -237,7 +247,7 @@ impl GcCompiler for NullCompiler { size, elems_addr, |func_env, builder, elem_ty, elem_addr, val| { - write_field_at_addr(func_env, builder, elem_ty, elem_addr, val) + write_field_at_addr(func_env, builder, elem_ty, elem_addr, val, stacks) }, )?; @@ -250,6 +260,7 @@ impl GcCompiler for NullCompiler { builder: &mut FunctionBuilder<'_>, struct_type_index: TypeIndex, field_vals: &[ir::Value], + stacks: &FuncTranslationStacks, ) -> WasmResult { let interned_type_index = func_env.module.types[struct_type_index].unwrap_module_type_index(); @@ -274,6 +285,7 @@ impl GcCompiler for NullCompiler { Some(interned_type_index), struct_size_val, align, + stacks, ); // Initialize the struct's fields. @@ -288,7 +300,7 @@ impl GcCompiler for NullCompiler { raw_struct_pointer, field_vals, |func_env, builder, ty, field_addr, val| { - write_field_at_addr(func_env, builder, ty, field_addr, val) + write_field_at_addr(func_env, builder, ty, field_addr, val, stacks) }, )?; @@ -303,6 +315,7 @@ impl GcCompiler for NullCompiler { field_vals: &[ir::Value], instance_id: ir::Value, tag: ir::Value, + stacks: &FuncTranslationStacks, ) -> WasmResult { let interned_type_index = func_env.module.tags[tag_index] .exception @@ -326,6 +339,7 @@ impl GcCompiler for NullCompiler { Some(interned_type_index), exn_size_val, align, + stacks, ); // Initialize the exception object's fields. @@ -340,7 +354,7 @@ impl GcCompiler for NullCompiler { raw_exn_pointer, field_vals, |func_env, builder, ty, field_addr, val| { - write_field_at_addr(func_env, builder, ty, field_addr, val) + write_field_at_addr(func_env, builder, ty, field_addr, val, stacks) }, )?; @@ -354,6 +368,7 @@ impl GcCompiler for NullCompiler { WasmStorageType::Val(WasmValType::I32), instance_id_addr, instance_id, + stacks, )?; let tag_addr = builder .ins() @@ -364,6 +379,7 @@ impl GcCompiler for NullCompiler { WasmStorageType::Val(WasmValType::I32), tag_addr, tag, + stacks, )?; Ok(exn_ref) @@ -376,6 +392,7 @@ impl GcCompiler for NullCompiler { _ty: WasmRefType, src: ir::Value, flags: ir::MemFlags, + _stacks: &FuncTranslationStacks, ) -> WasmResult { // NB: Don't use `unbarriered_load_gc_ref` here because we don't need to // mark the value as requiring inclusion in stack maps. 
@@ -390,6 +407,7 @@ impl GcCompiler for NullCompiler { dst: ir::Value, new_val: ir::Value, flags: ir::MemFlags, + _stacks: &FuncTranslationStacks, ) -> WasmResult<()> { unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags) } diff --git a/crates/cranelift/src/translate/code_translator.rs b/crates/cranelift/src/translate/code_translator.rs index b7855e3f2746..d55406eb67d6 100644 --- a/crates/cranelift/src/translate/code_translator.rs +++ b/crates/cranelift/src/translate/code_translator.rs @@ -185,7 +185,7 @@ pub fn translate_operator( ***********************************************************************************/ Operator::GlobalGet { global_index } => { let global_index = GlobalIndex::from_u32(*global_index); - let val = environ.translate_global_get(builder, global_index)?; + let val = environ.translate_global_get(builder, global_index, stack)?; stack.push1(val); } Operator::GlobalSet { global_index } => { @@ -195,7 +195,7 @@ pub fn translate_operator( if builder.func.dfg.value_type(val).is_vector() { val = optionally_bitcast_vector(val, I8X16, builder); } - environ.translate_global_set(builder, global_index, val)?; + environ.translate_global_set(builder, global_index, val, stack)?; } /********************************* Stack misc *************************************** * `drop`, `nop`, `unreachable` and `select`. @@ -230,7 +230,7 @@ pub fn translate_operator( // We do nothing } Operator::Unreachable => { - environ.trap(builder, crate::TRAP_UNREACHABLE); + environ.trap(builder, crate::TRAP_UNREACHABLE, stack); stack.reachable = false; } /***************************** Control flow blocks ********************************** @@ -622,14 +622,20 @@ pub fn translate_operator( let tag_index = TagIndex::from_u32(*tag_index); let arity = environ.tag_param_arity(tag_index); let args = stack.peekn(arity); - environ.translate_exn_throw(builder, tag_index, args, stack.handlers.handlers())?; + environ.translate_exn_throw( + builder, + tag_index, + args, + stack.handlers.handlers(), + stack, + )?; stack.popn(arity); stack.reachable = false; } Operator::ThrowRef => { let exnref = stack.pop1(); - environ.translate_exn_throw_ref(builder, exnref, stack.handlers.handlers())?; + environ.translate_exn_throw_ref(builder, exnref, stack.handlers.handlers(), stack)?; stack.reachable = false; } @@ -1083,19 +1089,19 @@ pub fn translate_operator( } Operator::I64TruncF64S | Operator::I64TruncF32S => { let val = stack.pop1(); - stack.push1(environ.translate_fcvt_to_sint(builder, I64, val)); + stack.push1(environ.translate_fcvt_to_sint(builder, I64, val, stack)); } Operator::I32TruncF64S | Operator::I32TruncF32S => { let val = stack.pop1(); - stack.push1(environ.translate_fcvt_to_sint(builder, I32, val)); + stack.push1(environ.translate_fcvt_to_sint(builder, I32, val, stack)); } Operator::I64TruncF64U | Operator::I64TruncF32U => { let val = stack.pop1(); - stack.push1(environ.translate_fcvt_to_uint(builder, I64, val)); + stack.push1(environ.translate_fcvt_to_uint(builder, I64, val, stack)); } Operator::I32TruncF64U | Operator::I32TruncF32U => { let val = stack.pop1(); - stack.push1(environ.translate_fcvt_to_uint(builder, I32, val)); + stack.push1(environ.translate_fcvt_to_uint(builder, I32, val, stack)); } Operator::I64TruncSatF64S | Operator::I64TruncSatF32S => { let val = stack.pop1(); @@ -1222,19 +1228,19 @@ pub fn translate_operator( } Operator::I32DivS | Operator::I64DivS => { let (arg1, arg2) = stack.pop2(); - stack.push1(environ.translate_sdiv(builder, arg1, arg2)); + 
stack.push1(environ.translate_sdiv(builder, arg1, arg2, stack)); } Operator::I32DivU | Operator::I64DivU => { let (arg1, arg2) = stack.pop2(); - stack.push1(environ.translate_udiv(builder, arg1, arg2)); + stack.push1(environ.translate_udiv(builder, arg1, arg2, stack)); } Operator::I32RemS | Operator::I64RemS => { let (arg1, arg2) = stack.pop2(); - stack.push1(environ.translate_srem(builder, arg1, arg2)); + stack.push1(environ.translate_srem(builder, arg1, arg2, stack)); } Operator::I32RemU | Operator::I64RemU => { let (arg1, arg2) = stack.pop2(); - stack.push1(environ.translate_urem(builder, arg1, arg2)); + stack.push1(environ.translate_urem(builder, arg1, arg2, stack)); } Operator::F32Min | Operator::F64Min => { let (arg1, arg2) = stack.pop2(); @@ -1325,7 +1331,13 @@ pub fn translate_operator( } else { let index_type = environ.heaps()[heap].index_type(); let offset = builder.ins().iconst(index_type, memarg.offset as i64); - environ.uadd_overflow_trap(builder, addr, offset, ir::TrapCode::HEAP_OUT_OF_BOUNDS) + environ.uadd_overflow_trap( + builder, + addr, + offset, + ir::TrapCode::HEAP_OUT_OF_BOUNDS, + stack, + ) }; // `fn translate_atomic_wait` can inspect the type of `expected` to figure out what // code it needs to generate, if it wants. @@ -1349,7 +1361,13 @@ pub fn translate_operator( } else { let index_type = environ.heaps()[heap].index_type(); let offset = builder.ins().iconst(index_type, memarg.offset as i64); - environ.uadd_overflow_trap(builder, addr, offset, ir::TrapCode::HEAP_OUT_OF_BOUNDS) + environ.uadd_overflow_trap( + builder, + addr, + offset, + ir::TrapCode::HEAP_OUT_OF_BOUNDS, + stack, + ) }; let res = environ.translate_atomic_notify( builder, @@ -1606,13 +1624,13 @@ pub fn translate_operator( Operator::TableGet { table: index } => { let table_index = TableIndex::from_u32(*index); let index = stack.pop1(); - stack.push1(environ.translate_table_get(builder, table_index, index)?); + stack.push1(environ.translate_table_get(builder, table_index, index, stack)?); } Operator::TableSet { table: index } => { let table_index = TableIndex::from_u32(*index); let value = stack.pop1(); let index = stack.pop1(); - environ.translate_table_set(builder, table_index, value, index)?; + environ.translate_table_set(builder, table_index, value, index, stack)?; } Operator::TableCopy { dst_table: dst_table_index, @@ -2555,7 +2573,7 @@ pub fn translate_operator( unreachable!("validation") }; let is_null = environ.translate_ref_is_null(builder.cursor(), r, *r_ty)?; - environ.trapnz(builder, is_null, crate::TRAP_NULL_REFERENCE); + environ.trapnz(builder, is_null, crate::TRAP_NULL_REFERENCE, stack); stack.push1(r); } @@ -2566,12 +2584,12 @@ pub fn translate_operator( } Operator::I31GetS => { let i31ref = stack.pop1(); - let val = environ.translate_i31_get_s(builder, i31ref)?; + let val = environ.translate_i31_get_s(builder, i31ref, stack)?; stack.push1(val); } Operator::I31GetU => { let i31ref = stack.pop1(); - let val = environ.translate_i31_get_u(builder, i31ref)?; + let val = environ.translate_i31_get_u(builder, i31ref, stack)?; stack.push1(val); } @@ -2580,13 +2598,15 @@ pub fn translate_operator( let arity = environ.struct_fields_len(struct_type_index)?; let fields: StructFieldsVec = stack.peekn(arity).iter().copied().collect(); stack.popn(arity); - let struct_ref = environ.translate_struct_new(builder, struct_type_index, fields)?; + let struct_ref = + environ.translate_struct_new(builder, struct_type_index, fields, stack)?; stack.push1(struct_ref); } Operator::StructNewDefault { 
struct_type_index } => { let struct_type_index = TypeIndex::from_u32(*struct_type_index); - let struct_ref = environ.translate_struct_new_default(builder, struct_type_index)?; + let struct_ref = + environ.translate_struct_new_default(builder, struct_type_index, stack)?; stack.push1(struct_ref); } @@ -2603,6 +2623,7 @@ pub fn translate_operator( *field_index, struct_ref, val, + stack, )?; } @@ -2618,6 +2639,7 @@ pub fn translate_operator( *field_index, struct_ref, Some(Extension::Sign), + stack, )?; stack.push1(val); } @@ -2634,6 +2656,7 @@ pub fn translate_operator( *field_index, struct_ref, Some(Extension::Zero), + stack, )?; stack.push1(val); } @@ -2650,6 +2673,7 @@ pub fn translate_operator( *field_index, struct_ref, None, + stack, )?; stack.push1(val); } @@ -2657,13 +2681,15 @@ pub fn translate_operator( Operator::ArrayNew { array_type_index } => { let array_type_index = TypeIndex::from_u32(*array_type_index); let (elem, len) = stack.pop2(); - let array_ref = environ.translate_array_new(builder, array_type_index, elem, len)?; + let array_ref = + environ.translate_array_new(builder, array_type_index, elem, len, stack)?; stack.push1(array_ref); } Operator::ArrayNewDefault { array_type_index } => { let array_type_index = TypeIndex::from_u32(*array_type_index); let len = stack.pop1(); - let array_ref = environ.translate_array_new_default(builder, array_type_index, len)?; + let array_ref = + environ.translate_array_new_default(builder, array_type_index, len, stack)?; stack.push1(array_ref); } Operator::ArrayNewFixed { @@ -2673,7 +2699,8 @@ pub fn translate_operator( let array_type_index = TypeIndex::from_u32(*array_type_index); let array_size = usize::try_from(*array_size).unwrap(); let elems = stack.peekn(array_size); - let array_ref = environ.translate_array_new_fixed(builder, array_type_index, elems)?; + let array_ref = + environ.translate_array_new_fixed(builder, array_type_index, elems, stack)?; stack.popn(array_size); stack.push1(array_ref); } @@ -2730,7 +2757,15 @@ pub fn translate_operator( Operator::ArrayFill { array_type_index } => { let array_type_index = TypeIndex::from_u32(*array_type_index); let (array, index, val, len) = stack.pop4(); - environ.translate_array_fill(builder, array_type_index, array, index, val, len)?; + environ.translate_array_fill( + builder, + array_type_index, + array, + index, + val, + len, + stack, + )?; } Operator::ArrayInitData { array_type_index, @@ -2768,14 +2803,20 @@ pub fn translate_operator( } Operator::ArrayLen => { let array = stack.pop1(); - let len = environ.translate_array_len(builder, array)?; + let len = environ.translate_array_len(builder, array, stack)?; stack.push1(len); } Operator::ArrayGet { array_type_index } => { let array_type_index = TypeIndex::from_u32(*array_type_index); let (array, index) = stack.pop2(); - let elem = - environ.translate_array_get(builder, array_type_index, array, index, None)?; + let elem = environ.translate_array_get( + builder, + array_type_index, + array, + index, + None, + stack, + )?; stack.push1(elem); } Operator::ArrayGetS { array_type_index } => { @@ -2787,6 +2828,7 @@ pub fn translate_operator( array, index, Some(Extension::Sign), + stack, )?; stack.push1(elem); } @@ -2799,13 +2841,14 @@ pub fn translate_operator( array, index, Some(Extension::Zero), + stack, )?; stack.push1(elem); } Operator::ArraySet { array_type_index } => { let array_type_index = TypeIndex::from_u32(*array_type_index); let (array, index, elem) = stack.pop3(); - environ.translate_array_set(builder, array_type_index, array, index, 
elem)?; + environ.translate_array_set(builder, array_type_index, array, index, elem, stack)?; } Operator::RefEq => { let (r1, r2) = stack.pop2(); @@ -2827,6 +2870,7 @@ pub fn translate_operator( }, r, *r_ty, + stack, )?; stack.push1(result); } @@ -2844,6 +2888,7 @@ pub fn translate_operator( }, r, *r_ty, + stack, )?; stack.push1(result); } @@ -2861,8 +2906,9 @@ pub fn translate_operator( }, r, *r_ty, + stack, )?; - environ.trapz(builder, cast_okay, crate::TRAP_CAST_FAILURE); + environ.trapz(builder, cast_okay, crate::TRAP_CAST_FAILURE, stack); stack.push1(r); } Operator::RefCastNullable { hty } => { @@ -2879,8 +2925,9 @@ pub fn translate_operator( }, r, *r_ty, + stack, )?; - environ.trapz(builder, cast_okay, crate::TRAP_CAST_FAILURE); + environ.trapz(builder, cast_okay, crate::TRAP_CAST_FAILURE, stack); stack.push1(r); } Operator::BrOnCast { @@ -2894,7 +2941,7 @@ pub fn translate_operator( }; let to_ref_type = environ.convert_ref_type(*to_ref_type)?; - let cast_is_okay = environ.translate_ref_test(builder, to_ref_type, r, *r_ty)?; + let cast_is_okay = environ.translate_ref_test(builder, to_ref_type, r, *r_ty, stack)?; let (cast_succeeds_block, inputs) = translate_br_if_args(*relative_depth, stack); let cast_fails_block = builder.create_block(); @@ -2928,7 +2975,7 @@ pub fn translate_operator( }; let to_ref_type = environ.convert_ref_type(*to_ref_type)?; - let cast_is_okay = environ.translate_ref_test(builder, to_ref_type, r, *r_ty)?; + let cast_is_okay = environ.translate_ref_test(builder, to_ref_type, r, *r_ty, stack)?; let (cast_fails_block, inputs) = translate_br_if_args(*relative_depth, stack); let cast_succeeds_block = builder.create_block(); @@ -3453,6 +3500,7 @@ fn prepare_addr( access_size, }, ir::TrapCode::HEAP_OUT_OF_BOUNDS, + stack, ), // If the offset doesn't fit within a u32, then we can't pass it @@ -3490,6 +3538,7 @@ fn prepare_addr( index, offset, ir::TrapCode::HEAP_OUT_OF_BOUNDS, + stack, ); bounds_check_and_compute_addr( builder, @@ -3501,6 +3550,7 @@ fn prepare_addr( access_size, }, ir::TrapCode::HEAP_OUT_OF_BOUNDS, + stack, ) } }; @@ -3560,7 +3610,7 @@ fn align_atomic_addr( .ins() .band_imm(effective_addr, i64::from(loaded_bytes - 1)); let f = builder.ins().icmp_imm(IntCC::NotEqual, misalignment, 0); - environ.trapnz(builder, f, crate::TRAP_HEAP_MISALIGNED); + environ.trapnz(builder, f, crate::TRAP_HEAP_MISALIGNED, stack); } } @@ -4379,7 +4429,7 @@ fn create_catch_block( if let Some(tag) = tag { let tag = TagIndex::from_u32(tag); - params.extend(environ.translate_exn_unbox(builder, tag, exn_ref)?); + params.extend(environ.translate_exn_unbox(builder, tag, exn_ref, stacks)?); } if is_ref { params.push(exn_ref); diff --git a/crates/cranelift/src/translate/table.rs b/crates/cranelift/src/translate/table.rs index 4d01da7802c9..55fbc8fe4744 100644 --- a/crates/cranelift/src/translate/table.rs +++ b/crates/cranelift/src/translate/table.rs @@ -1,4 +1,5 @@ use crate::func_environ::FuncEnvironment; +use crate::translate::FuncTranslationStacks; use cranelift_codegen::cursor::FuncCursor; use cranelift_codegen::ir::{self, InstBuilder, condcodes::IntCC, immediates::Imm64}; use cranelift_codegen::isa::TargetIsa; @@ -62,6 +63,7 @@ impl TableData { env: &mut FuncEnvironment<'_>, pos: &mut FunctionBuilder, mut index: ir::Value, + stacks: &FuncTranslationStacks, ) -> (ir::Value, ir::MemFlags) { let index_ty = pos.func.dfg.value_type(index); let addr_ty = env.pointer_type(); @@ -78,7 +80,7 @@ impl TableData { .icmp(IntCC::UnsignedGreaterThanOrEqual, index, bound); if 
!spectre_mitigations_enabled { - env.trapnz(pos, oob, crate::TRAP_TABLE_OUT_OF_BOUNDS); + env.trapnz(pos, oob, crate::TRAP_TABLE_OUT_OF_BOUNDS, stacks); } // Convert `index` to `addr_ty`. diff --git a/crates/environ/src/vmoffsets.rs b/crates/environ/src/vmoffsets.rs index 60c88a2c520e..15795ca6aa98 100644 --- a/crates/environ/src/vmoffsets.rs +++ b/crates/environ/src/vmoffsets.rs @@ -196,26 +196,26 @@ pub trait PtrSize { /// Return the offset of the `gc_heap.base` field within a `VMStoreContext`. fn vmstore_context_gc_heap_base(&self) -> u8 { let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_base(); - debug_assert!(offset < self.vmstore_context_last_wasm_exit_trampoline_fp()); + debug_assert!(offset < self.vmstore_context_last_wasm_exit_fp()); offset } /// Return the offset of the `gc_heap.current_length` field within a `VMStoreContext`. fn vmstore_context_gc_heap_current_length(&self) -> u8 { let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_current_length(); - debug_assert!(offset < self.vmstore_context_last_wasm_exit_trampoline_fp()); + debug_assert!(offset < self.vmstore_context_last_wasm_exit_fp()); offset } - /// Return the offset of the `last_wasm_exit_trampoline_fp` field - /// of `VMStoreContext`. - fn vmstore_context_last_wasm_exit_trampoline_fp(&self) -> u8 { + /// Return the offset of the `last_wasm_exit_fp` field of + /// `VMStoreContext`. + fn vmstore_context_last_wasm_exit_fp(&self) -> u8 { self.vmstore_context_gc_heap() + self.size_of_vmmemory_definition() } /// Return the offset of the `last_wasm_exit_pc` field of `VMStoreContext`. fn vmstore_context_last_wasm_exit_pc(&self) -> u8 { - self.vmstore_context_last_wasm_exit_trampoline_fp() + self.size() + self.vmstore_context_last_wasm_exit_fp() + self.size() } /// Return the offset of the `last_wasm_entry_sp` field of `VMStoreContext`. diff --git a/crates/wasmtime/src/runtime/debug.rs b/crates/wasmtime/src/runtime/debug.rs index 70bcef9526a0..1e681564e3cd 100644 --- a/crates/wasmtime/src/runtime/debug.rs +++ b/crates/wasmtime/src/runtime/debug.rs @@ -43,10 +43,11 @@ impl<'a, T> StoreContextMut<'a, T> { // `StoreOpaque`), which owns all active stacks in the // store. We do not provide any API that could mutate the // frames that we are walking on the `DebugFrameCursor`. + let is_trapping_frame = unsafe { *self.0.vm_store_context().last_wasm_exit_was_trap.get() }; let iter = unsafe { CurrentActivationBacktrace::new(self) }; let mut view = DebugFrameCursor { iter, - is_trapping_frame: false, + is_trapping_frame, frames: vec![], current: None, }; diff --git a/crates/wasmtime/src/runtime/func.rs b/crates/wasmtime/src/runtime/func.rs index 7de4f2229be4..a8cdc8a411b1 100644 --- a/crates/wasmtime/src/runtime/func.rs +++ b/crates/wasmtime/src/runtime/func.rs @@ -1525,7 +1525,8 @@ pub(crate) struct EntryStoreContext { /// `VMStoreContext` when exiting Wasm. 
     pub stack_limit: Option<usize>,
     pub last_wasm_exit_pc: usize,
-    pub last_wasm_exit_trampoline_fp: usize,
+    pub last_wasm_exit_fp: usize,
+    pub last_wasm_exit_was_trap: bool,
     pub last_wasm_entry_fp: usize,
     pub last_wasm_entry_sp: usize,
     pub last_wasm_entry_trap_handler: usize,
@@ -1622,9 +1623,8 @@ impl EntryStoreContext {
             Self {
                 stack_limit,
                 last_wasm_exit_pc: *(*vm_store_context).last_wasm_exit_pc.get(),
-                last_wasm_exit_trampoline_fp: *(*vm_store_context)
-                    .last_wasm_exit_trampoline_fp
-                    .get(),
+                last_wasm_exit_fp: *(*vm_store_context).last_wasm_exit_fp.get(),
+                last_wasm_exit_was_trap: *(*vm_store_context).last_wasm_exit_was_trap.get(),
                 last_wasm_entry_fp: *(*vm_store_context).last_wasm_entry_fp.get(),
                 last_wasm_entry_sp: *(*vm_store_context).last_wasm_entry_sp.get(),
                 last_wasm_entry_trap_handler: *(*vm_store_context)
@@ -1647,9 +1647,9 @@ impl EntryStoreContext {
                 *(&*self.vm_store_context).stack_limit.get() = limit;
             }
-            *(*self.vm_store_context).last_wasm_exit_trampoline_fp.get() =
-                self.last_wasm_exit_trampoline_fp;
             *(*self.vm_store_context).last_wasm_exit_pc.get() = self.last_wasm_exit_pc;
+            *(*self.vm_store_context).last_wasm_exit_fp.get() = self.last_wasm_exit_fp;
+            *(*self.vm_store_context).last_wasm_exit_was_trap.get() = self.last_wasm_exit_was_trap;
             *(*self.vm_store_context).last_wasm_entry_fp.get() = self.last_wasm_entry_fp;
             *(*self.vm_store_context).last_wasm_entry_sp.get() = self.last_wasm_entry_sp;
             *(*self.vm_store_context).last_wasm_entry_trap_handler.get() =
diff --git a/crates/wasmtime/src/runtime/trap.rs b/crates/wasmtime/src/runtime/trap.rs
index 3089e4c4ef5f..49fee82b27c0 100644
--- a/crates/wasmtime/src/runtime/trap.rs
+++ b/crates/wasmtime/src/runtime/trap.rs
@@ -99,6 +99,7 @@ pub(crate) fn from_runtime_box(
         crate::runtime::vm::TrapReason::User(error) => (error, None),
         crate::runtime::vm::TrapReason::Jit {
             pc,
+            fp: _,
             faulting_addr,
             trap,
         } => {
diff --git a/crates/wasmtime/src/runtime/vm/interpreter.rs b/crates/wasmtime/src/runtime/vm/interpreter.rs
index f67e088d101a..f2e11218f847 100644
--- a/crates/wasmtime/src/runtime/vm/interpreter.rs
+++ b/crates/wasmtime/src/runtime/vm/interpreter.rs
@@ -471,6 +471,8 @@ impl InterpreterRef<'_> {
                     TrapKind::StackOverflow => Trap::StackOverflow,
                 };
                 s.set_jit_trap(regs, None, trap);
+                log::trace!("about to invoke debug event from interpreter");
+                s.debug_event_from_interpreter();
                 s.entry_trap_handler()
             }
             None => {
diff --git a/crates/wasmtime/src/runtime/vm/throw.rs b/crates/wasmtime/src/runtime/vm/throw.rs
index ba5105fe112f..75cc5c42064f 100644
--- a/crates/wasmtime/src/runtime/vm/throw.rs
+++ b/crates/wasmtime/src/runtime/vm/throw.rs
@@ -32,7 +32,7 @@ pub unsafe fn compute_handler(store: &mut dyn VMStore) -> Option {
     let (exit_pc, exit_fp, entry_fp) = unsafe {
         (
             *nogc.vm_store_context().last_wasm_exit_pc.get(),
-            nogc.vm_store_context().last_wasm_exit_fp(),
+            *nogc.vm_store_context().last_wasm_exit_fp.get(),
             *nogc.vm_store_context().last_wasm_entry_fp.get(),
         )
     };
diff --git a/crates/wasmtime/src/runtime/vm/traphandlers.rs b/crates/wasmtime/src/runtime/vm/traphandlers.rs
index 353f8e608347..774c989420a2 100644
--- a/crates/wasmtime/src/runtime/vm/traphandlers.rs
+++ b/crates/wasmtime/src/runtime/vm/traphandlers.rs
@@ -18,7 +18,7 @@ pub use self::signals::*;
 #[cfg(feature = "gc")]
 use crate::ThrownException;
 use crate::runtime::module::lookup_code;
-use crate::runtime::store::{ExecutorRef, StoreOpaque};
+use crate::runtime::store::ExecutorRef;
 use crate::runtime::vm::sys::traphandlers;
 use crate::runtime::vm::{InterpreterRef,
VMContext, VMStore, VMStoreContext, f32x4, f64x2, i8x16}; #[cfg(feature = "debug")] @@ -367,6 +367,10 @@ pub enum TrapReason { /// the trapping address to a trap code. pc: usize, + /// The FP register from when this trap originated. + #[allow(dead_code, reason = "used in debug configuration")] + fp: usize, + /// If the trap was a memory-related trap such as SIGSEGV then this /// field will contain the address of the inaccessible data. /// @@ -546,6 +550,56 @@ mod call_thread_state { pub(crate) vm_store_context: NonNull, pub(crate) unwinder: &'static dyn Unwind, + /// Raw pointer to the `*mut dyn VMStore` running in this + /// activation, to be used *only* when re-entering the host + /// during a trap. + /// + /// This is a very tricky ownership/provenance dance. When + /// control is in the Wasm code itself, the store is + /// completely owned by the Wasm. It passes ownership back + /// during hostcalls via mutable reborrow (as with any call + /// with a `&mut self` in Rust). That's all well and good for + /// explicit calls. + /// + /// When a trap occurs, however, we can also think of the + /// ownership passing as-if the trapping instruction were a + /// hostcall with a `&mut dyn VMStore` parameter. This works + /// *as long as* all possibly trapping points in compiled code + /// act as if they invalidate any other held borrows into the + /// store. + /// + /// It turns out that we generally enforce this in compiled + /// guest code in Cranelift: any `can_trap` opcode returns + /// `true` from `has_memory_fence_semantics()` (see + /// corresponding comment there). This is enough to ensure + /// that the compiler treats every trapping op as-if it were a + /// hostcall, which clobbers all memory state; so from the + /// Wasm code's point of view, it is safely reborrowing the + /// Store and passing it "somewhere" on every trap. The + /// plumbing for that "passing" goes through this field, but + /// that is an implementation detail. When control comes back + /// out of the Wasm activation, we clear this field; the + /// invocation itself takes a mutable borrow of the store, so + /// safety is preserved on the caller side as well. In other + /// words, the provenance is something like + /// + /// ```plain + /// + /// host (caller side) with `&mut dyn VMStore` + /// / \ + /// (param into / (this field) + /// entry trampoline) \ + /// | | + /// ~~~~~~~ (wasm code) ~~~~~~ + /// | | + /// libcall trap + /// ``` + /// + /// with only *one* of those paths dynamically taken at any + /// given time. + #[cfg(all(feature = "debug", feature = "pulley"))] + pub(crate) raw_store: NonNull, + pub(super) prev: Cell, // The state of the runtime for the *previous* `CallThreadState` for @@ -570,7 +624,7 @@ mod call_thread_state { impl CallThreadState { #[inline] pub(super) fn new( - store: &mut StoreOpaque, + store: &mut dyn VMStore, old_state: *mut EntryStoreContext, ) -> CallThreadState { CallThreadState { @@ -582,6 +636,8 @@ mod call_thread_state { #[cfg(feature = "coredump")] capture_coredump: store.engine().config().coredump_on_trap, vm_store_context: store.vm_store_context_ptr(), + #[cfg(all(feature = "debug", feature = "pulley"))] + raw_store: NonNull::from_mut(store), prev: Cell::new(ptr::null()), old_state, } @@ -594,10 +650,7 @@ mod call_thread_state { /// Requires that the saved last Wasm trampoline FP points to /// a valid trampoline frame, or is null. 
pub unsafe fn old_last_wasm_exit_fp(&self) -> usize { - let trampoline_fp = unsafe { (&*self.old_state).last_wasm_exit_trampoline_fp }; - // SAFETY: `trampoline_fp` is either a valid FP from an - // active trampoline frame or is null. - unsafe { VMStoreContext::wasm_exit_fp_from_trampoline_fp(trampoline_fp) } + unsafe { (*self.old_state).last_wasm_exit_fp } } /// Get the saved PC upon exit from Wasm for the previous `CallThreadState`. @@ -671,13 +724,17 @@ mod call_thread_state { unsafe { let cx = self.vm_store_context.as_ref(); swap( - &cx.last_wasm_exit_trampoline_fp, - &mut (*self.old_state).last_wasm_exit_trampoline_fp, + &cx.last_wasm_exit_fp, + &mut (*self.old_state).last_wasm_exit_fp, ); swap( &cx.last_wasm_exit_pc, &mut (*self.old_state).last_wasm_exit_pc, ); + swap( + &cx.last_wasm_exit_was_trap, + &mut (*self.old_state).last_wasm_exit_was_trap, + ); swap( &cx.last_wasm_entry_fp, &mut (*self.old_state).last_wasm_entry_fp, @@ -826,73 +883,7 @@ impl CallThreadState { unsafe fn unwind(&self, store: &mut dyn VMStore) { #[allow(unused_mut, reason = "only mutated in `debug` configuration")] let mut unwind = self.unwind.replace(UnwindState::None); - - #[cfg(feature = "debug")] - { - let result = match &unwind { - UnwindState::UnwindToWasm(_) => { - assert!(store.as_store_opaque().has_pending_exception()); - let exn = store - .as_store_opaque() - .pending_exception_owned_rooted() - .expect("exception should be set when we are throwing"); - store.block_on_debug_handler(crate::DebugEvent::CaughtExceptionThrown(exn)) - } - - UnwindState::UnwindToHost { - reason: UnwindReason::Trap(TrapReason::Exception), - .. - } => { - use crate::store::AsStoreOpaque; - let exn = store - .as_store_opaque() - .pending_exception_owned_rooted() - .expect("exception should be set when we are throwing"); - store.block_on_debug_handler(crate::DebugEvent::UncaughtExceptionThrown( - exn.clone(), - )) - } - UnwindState::UnwindToHost { - reason: UnwindReason::Trap(TrapReason::Wasm(trap)), - .. - } => store.block_on_debug_handler(crate::DebugEvent::Trap(*trap)), - UnwindState::UnwindToHost { - reason: UnwindReason::Trap(TrapReason::User(err)), - .. - } => store.block_on_debug_handler(crate::DebugEvent::HostcallError(err)), - - UnwindState::UnwindToHost { - reason: UnwindReason::Trap(TrapReason::Jit { .. }), - .. - } => { - // JIT traps not handled yet. - Ok(()) - } - #[cfg(all(feature = "std", panic = "unwind"))] - UnwindState::UnwindToHost { - reason: UnwindReason::Panic(_), - .. - } => { - // We don't invoke any debugger hook when we're - // unwinding due to a Rust (host-side) panic. - Ok(()) - } - - UnwindState::None => unreachable!(), - }; - - // If the debugger invocation itself resulted in an `Err` - // (which can only come from the `block_on` hitting a - // failure mode), we need to override our unwind as-if - // were handling a host error. - if let Err(err) = result { - unwind = UnwindState::UnwindToHost { - reason: UnwindReason::Trap(TrapReason::User(err)), - backtrace: None, - coredump_stack: None, - }; - } - } + self.debug_event_on_unwind(store, &mut unwind); match unwind { UnwindState::UnwindToHost { .. } => { @@ -937,6 +928,114 @@ impl CallThreadState { } } + /// From the Pulley interpreter, perform a fiber suspend for a + /// debug event handler after setting the unwind state with + /// `set_jit_trap`. 
+ #[cfg(all(feature = "debug", feature = "pulley"))] + pub(crate) fn debug_event_from_interpreter(&self) { + let mut unwind = self.unwind.replace(UnwindState::None); + // SAFETY: this is invoked only within a trapping context when + // we have received control back from the Wasm code. See the + // provenance diagram and comments on `self.raw_store` for + // more details. + let store = unsafe { self.raw_store.as_ptr().as_mut().unwrap() }; + self.debug_event_on_unwind(store, &mut unwind); + self.unwind.set(unwind); + } + + /// Suspend from the current fiber, blocking on an async debug + /// callback hook, if any, if the `unwind` state merits a debug + /// event. + #[cfg(feature = "debug")] + fn debug_event_on_unwind(&self, store: &mut dyn VMStore, unwind: &mut UnwindState) { + let result = match unwind { + UnwindState::UnwindToWasm(_) => { + assert!(store.as_store_opaque().has_pending_exception()); + let exn = store + .as_store_opaque() + .pending_exception_owned_rooted() + .expect("exception should be set when we are throwing"); + store.block_on_debug_handler(crate::DebugEvent::CaughtExceptionThrown(exn)) + } + + UnwindState::UnwindToHost { + reason: UnwindReason::Trap(TrapReason::Exception), + .. + } => { + use crate::store::AsStoreOpaque; + let exn = store + .as_store_opaque() + .pending_exception_owned_rooted() + .expect("exception should be set when we are throwing"); + store + .block_on_debug_handler(crate::DebugEvent::UncaughtExceptionThrown(exn.clone())) + } + UnwindState::UnwindToHost { + reason: UnwindReason::Trap(TrapReason::Wasm(trap)), + .. + } => store.block_on_debug_handler(crate::DebugEvent::Trap(*trap)), + UnwindState::UnwindToHost { + reason: UnwindReason::Trap(TrapReason::User(err)), + .. + } => store.block_on_debug_handler(crate::DebugEvent::HostcallError(err)), + + UnwindState::UnwindToHost { + reason: UnwindReason::Trap(TrapReason::Jit { pc, fp, trap, .. }), + .. + } => self.with_trap_exit_state(*pc, *fp, |_| { + store.block_on_debug_handler(crate::DebugEvent::Trap(*trap)) + }), + #[cfg(all(feature = "std", panic = "unwind"))] + UnwindState::UnwindToHost { + reason: UnwindReason::Panic(_), + .. + } => { + // We don't invoke any debugger hook when we're + // unwinding due to a Rust (host-side) panic. + Ok(()) + } + + UnwindState::None => unreachable!(), + }; + + // If the debugger invocation itself resulted in an `Err` + // (which can only come from the `block_on` hitting a + // failure mode), we need to override our unwind as-if + // were handling a host error. + if let Err(err) = result { + *unwind = UnwindState::UnwindToHost { + reason: UnwindReason::Trap(TrapReason::User(err)), + backtrace: None, + coredump_stack: None, + }; + } + } + + #[cfg(not(feature = "debug"))] + pub(crate) fn debug_event_on_unwind( + &self, + _store: &mut dyn VMStore, + _unwind: &mut UnwindState, + ) { + } + + /// Set up our state according to a trap exit back to the host. 
+    #[cfg(feature = "debug")]
+    fn with_trap_exit_state<R, F: FnOnce(&Self) -> R>(&self, pc: usize, fp: usize, f: F) -> R {
+        unsafe {
+            let cx = self.vm_store_context.as_ref();
+            *cx.last_wasm_exit_pc.get() = pc;
+            *cx.last_wasm_exit_fp.get() = fp;
+            *cx.last_wasm_exit_was_trap.get() = true;
+        }
+        let result = f(self);
+        unsafe {
+            let cx = self.vm_store_context.as_ref();
+            *cx.last_wasm_exit_was_trap.get() = false;
+        }
+        result
+    }
+
     pub(crate) fn entry_trap_handler(&self) -> Handler {
         unsafe {
             let vm_store_context = self.vm_store_context.as_ref();
@@ -1053,6 +1152,7 @@ impl CallThreadState {
         self.unwind.set(UnwindState::UnwindToHost {
             reason: UnwindReason::Trap(TrapReason::Jit {
                 pc,
+                fp,
                 faulting_addr,
                 trap,
             }),
diff --git a/crates/wasmtime/src/runtime/vm/traphandlers/backtrace.rs b/crates/wasmtime/src/runtime/vm/traphandlers/backtrace.rs
index be9403e97f88..f34282c9b41b 100644
--- a/crates/wasmtime/src/runtime/vm/traphandlers/backtrace.rs
+++ b/crates/wasmtime/src/runtime/vm/traphandlers/backtrace.rs
@@ -175,7 +175,7 @@ impl Backtrace {
             // through the Wasm-to-host trampoline.
             None => unsafe {
                 let pc = *(*vm_store_context).last_wasm_exit_pc.get();
-                let fp = (*vm_store_context).last_wasm_exit_fp();
+                let fp = *(*vm_store_context).last_wasm_exit_fp.get();
                 (pc, fp)
             },
         };
@@ -364,8 +364,11 @@ impl<'a, T: 'static> CurrentActivationBacktrace<'a, T> {
         // Get the initial exit FP, exit PC, and entry FP.
         let vm_store_context = store.0.vm_store_context();
         let exit_pc = unsafe { *(*vm_store_context).last_wasm_exit_pc.get() };
-        let exit_fp = unsafe { (*vm_store_context).last_wasm_exit_fp() };
+        let exit_fp = unsafe { *(*vm_store_context).last_wasm_exit_fp.get() };
         let trampoline_fp = unsafe { *(*vm_store_context).last_wasm_entry_fp.get() };
+        log::trace!(
+            "activation backtrace: exit_pc {exit_pc:x} exit_fp {exit_fp:x} entry_fp {trampoline_fp:x}"
+        );
         let inner: Box> = if exit_fp == 0 {
             // No activations on this Store; return an empty iterator.
             Box::new(core::iter::empty())
diff --git a/crates/wasmtime/src/runtime/vm/vmcontext.rs b/crates/wasmtime/src/runtime/vm/vmcontext.rs
index ef714422d688..2e11ae69bc60 100644
--- a/crates/wasmtime/src/runtime/vm/vmcontext.rs
+++ b/crates/wasmtime/src/runtime/vm/vmcontext.rs
@@ -1110,8 +1110,8 @@ pub struct VMStoreContext {
     /// The `VMMemoryDefinition` for this store's GC heap.
     pub gc_heap: VMMemoryDefinition,

-    /// The value of the frame pointer register in the trampoline used
-    /// to call from Wasm to the host.
+    /// The value of the frame pointer register in the Wasm frame that
+    /// called from Wasm to the host.
     ///
     /// Maintained by our Wasm-to-host trampoline, and cleared just
     /// before calling into Wasm in `catch_traps`.
@@ -1120,13 +1120,8 @@ pub struct VMStoreContext {
     /// to the host.
     ///
     /// Used to find the start of a contiguous sequence of Wasm frames
-    /// when walking the stack. Note that we record the FP of the
-    /// *trampoline*'s frame, not the last Wasm frame, because we need
-    /// to know the SP (bottom of frame) of the last Wasm frame as
-    /// well in case we need to resume to an exception handler in that
-    /// frame. The FP of the last Wasm frame can be recovered by
-    /// loading the saved FP value at this FP address.
-    pub last_wasm_exit_trampoline_fp: UnsafeCell<usize>,
+    /// when walking the stack.
+    pub last_wasm_exit_fp: UnsafeCell<usize>,

     /// The last Wasm program counter before we called from Wasm to the host.
     ///
@@ -1188,59 +1183,18 @@ pub struct VMStoreContext {
     /// situation while this field is read it'll never classify a fault as an
     /// guard page fault.
pub async_guard_range: Range<*mut u8>, -} - -impl VMStoreContext { - /// From the current saved trampoline FP, get the FP of the last - /// Wasm frame. If the current saved trampoline FP is null, return - /// null. - /// - /// We store only the trampoline FP, because (i) we need the - /// trampoline FP, so we know the size (bottom) of the last Wasm - /// frame; and (ii) the last Wasm frame, just above the trampoline - /// frame, can be recovered via the FP chain. - /// - /// # Safety - /// - /// This function requires that the `last_wasm_exit_trampoline_fp` - /// field either points to an active trampoline frame or is a null - /// pointer. - pub(crate) unsafe fn last_wasm_exit_fp(&self) -> usize { - // SAFETY: the unsafe cell is safe to load (no other threads - // will be writing our store when we have control), and the - // helper function's safety condition is the same as ours. - unsafe { - let trampoline_fp = *self.last_wasm_exit_trampoline_fp.get(); - Self::wasm_exit_fp_from_trampoline_fp(trampoline_fp) - } - } - /// From any saved trampoline FP, get the FP of the last Wasm - /// frame. If the given trampoline FP is null, return null. - /// - /// This differs from `last_wasm_exit_fp()` above in that it - /// allows accessing activations further up the stack as well, - /// e.g. via `CallThreadState::old_state`. + /// The last Wasm exit to host was due to a trap. /// - /// # Safety + /// This is set whenever we update the exit state *from the host* + /// when handling a trap. It allows us to interpret the exit PC + /// correctly -- that is, either pointing *to* a trapping + /// instruction, or *after* a call (a single PC could be both + /// after a call and at a trapping instruction!). /// - /// This function requires that the provided FP value is valid, - /// and points to an active trampoline frame, or is null. - /// - /// This function depends on the invariant that on all supported - /// architectures, we store the previous FP value under the - /// current FP. This is a property of our ABI that we control and - /// ensure. - pub(crate) unsafe fn wasm_exit_fp_from_trampoline_fp(trampoline_fp: usize) -> usize { - if trampoline_fp != 0 { - // SAFETY: We require that trampoline_fp points to a valid - // frame, which will (by definition) contain an old FP value - // that we can load. - unsafe { *(trampoline_fp as *const usize) } - } else { - 0 - } - } + /// It is set *only* from host code, but is kept here alongside + /// the other last-exit state for consistency. 
+ pub last_wasm_exit_was_trap: UnsafeCell, } // The `VMStoreContext` type is a pod-type with no destructor, and we don't @@ -1263,7 +1217,7 @@ impl Default for VMStoreContext { base: NonNull::dangling().into(), current_length: AtomicUsize::new(0), }, - last_wasm_exit_trampoline_fp: UnsafeCell::new(0), + last_wasm_exit_fp: UnsafeCell::new(0), last_wasm_exit_pc: UnsafeCell::new(0), last_wasm_entry_fp: UnsafeCell::new(0), last_wasm_entry_sp: UnsafeCell::new(0), @@ -1271,6 +1225,7 @@ impl Default for VMStoreContext { stack_chain: UnsafeCell::new(VMStackChain::Absent), async_guard_range: ptr::null_mut()..ptr::null_mut(), store_data: VmPtr::dangling(), + last_wasm_exit_was_trap: UnsafeCell::new(false), } } } @@ -1310,8 +1265,8 @@ mod test_vmstore_context { usize::from(offsets.ptr.vmstore_context_gc_heap_current_length()) ); assert_eq!( - offset_of!(VMStoreContext, last_wasm_exit_trampoline_fp), - usize::from(offsets.ptr.vmstore_context_last_wasm_exit_trampoline_fp()) + offset_of!(VMStoreContext, last_wasm_exit_fp), + usize::from(offsets.ptr.vmstore_context_last_wasm_exit_fp()) ); assert_eq!( offset_of!(VMStoreContext, last_wasm_exit_pc), diff --git a/tests/all/debug.rs b/tests/all/debug.rs index 904b0b33c6b7..3eab430e7466 100644 --- a/tests/all/debug.rs +++ b/tests/all/debug.rs @@ -363,12 +363,6 @@ async fn caught_exception_events() -> anyhow::Result<()> { #[tokio::test] #[cfg_attr(miri, ignore)] -#[cfg(any( - target_arch = "x86_64", - target_arch = "aarch64", - target_arch = "s390x", - target_arch = "riscv64" -))] async fn hostcall_trap_events() -> anyhow::Result<()> { let _ = env_logger::try_init(); @@ -392,7 +386,14 @@ async fn hostcall_trap_events() -> anyhow::Result<()> { debug_event_checker!( D, store, { 0 ; - wasmtime::DebugEvent::Trap(wasmtime_environ::Trap::IntegerDivisionByZero) => {} + wasmtime::DebugEvent::Trap(wasmtime_environ::Trap::IntegerDivisionByZero) => { + let mut stack = store.debug_frames().unwrap(); + assert!(!stack.done()); + assert_eq!(stack.wasm_function_index_and_pc().unwrap().0.as_u32(), 0); + assert_eq!(stack.wasm_function_index_and_pc().unwrap().1, 37); + stack.move_to_parent(); + assert!(stack.done()); + } } ); diff --git a/tests/disas/debug-exceptions.wat b/tests/disas/debug-exceptions.wat index 540226aba234..bc94e4fc1af4 100644 --- a/tests/disas/debug-exceptions.wat +++ b/tests/disas/debug-exceptions.wat @@ -40,14 +40,14 @@ ;; ╰─╼ debug frame state (before next inst): func key DefinedWasmFunction(StaticModuleIndex(0), DefinedFuncIndex(0)), wasm PC 64, slot at FP-0xb0, locals , stack ;; ldur x2, [sp, #0x10] ;; ╰─╼ debug frame state (before next inst): func key DefinedWasmFunction(StaticModuleIndex(0), DefinedFuncIndex(0)), wasm PC 66, slot at FP-0xb0, locals , stack I32 @ slot+0x8 -;; bl #0x31c +;; bl #0x328 ;; 60: mov x21, x2 ;; mov w3, #0x4000000 ;; mov w4, #2 ;; mov w5, #0x28 ;; mov w6, #8 ;; ldur x2, [sp, #0x10] -;; bl #0x2a8 +;; bl #0x2b0 ;; 7c: ldur x11, [sp, #0x10] ;; ldr x0, [x11, #8] ;; ldr x5, [x0, #0x18] @@ -62,7 +62,7 @@ ;; str w3, [x4, w2, uxtw] ;; mov x3, x2 ;; ldur x2, [sp, #0x10] -;; bl #0x354 +;; bl #0x364 ;; ├─╼ exception frame offset: SP = FP - 0xb0 ;; ╰─╼ exception handler: tag=0, context at [SP+0x10], handler=0xbc ;; b8: .byte 0x1f, 0xc1, 0x00, 0x00 diff --git a/tests/disas/epoch-interruption-x86.wat b/tests/disas/epoch-interruption-x86.wat index 043114a2b2cd..7ba7c7666c1a 100644 --- a/tests/disas/epoch-interruption-x86.wat +++ b/tests/disas/epoch-interruption-x86.wat @@ -28,12 +28,12 @@ ;; jae 0x64 ;; jmp 0x46 ;; 57: movq %r15, %rdi -;; callq 
0xe1 +;; callq 0xe3 ;; jmp 0x46 ;; 64: movq 8(%r13), %rax ;; cmpq %rax, %r11 ;; jb 0x46 ;; 71: movq %r15, %rdi -;; callq 0xe1 +;; callq 0xe3 ;; jmp 0x46 ;; 7e: ud2 diff --git a/tests/disas/exceptions.wat b/tests/disas/exceptions.wat index c125c8b60cdd..209547d854d7 100644 --- a/tests/disas/exceptions.wat +++ b/tests/disas/exceptions.wat @@ -33,14 +33,14 @@ ;; movq %rdi, %r12 ;; movq %rcx, %r13 ;; movq %rdx, %r15 -;; callq 0x3b2 +;; callq 0x3ba ;; movq %rax, %r14 ;; movl $0x4000000, %esi ;; movl $3, %edx ;; movl $0x30, %ecx ;; movl $8, %r8d ;; movq %r12, %rdi -;; callq 0x34f +;; callq 0x353 ;; movq 8(%r12), %r8 ;; movq 0x18(%r8), %r8 ;; movl %eax, %r9d @@ -54,7 +54,7 @@ ;; movq %rax, %rsi ;; movq %r12, %rdi ;; movq %r12, (%rsp) -;; callq 0x3de +;; callq 0x3e9 ;; ud2 ;; ud2 ;; diff --git a/tests/disas/gc/struct-new-stack-map.wat b/tests/disas/gc/struct-new-stack-map.wat index a58654548d21..8df655d16325 100644 --- a/tests/disas/gc/struct-new-stack-map.wat +++ b/tests/disas/gc/struct-new-stack-map.wat @@ -30,7 +30,7 @@ ;; movl $0x28, %ecx ;; movl $8, %r8d ;; movq %rdi, %r13 -;; callq 0x12f +;; callq 0x133 ;; movq 8(%r13), %rdx ;; ╰─╼ stack_map: frame_size=48, frame_offsets=[0] ;; movq 0x18(%rdx), %rdx diff --git a/tests/disas/pulley/epoch-simple.wat b/tests/disas/pulley/epoch-simple.wat index 947f92812732..6322437bb456 100644 --- a/tests/disas/pulley/epoch-simple.wat +++ b/tests/disas/pulley/epoch-simple.wat @@ -14,5 +14,5 @@ ;; br_if_xulteq64 x7, x6, 0x9 // target = 0x26 ;; 24: pop_frame ;; ret -;; 26: call 0x6f // target = 0x95 +;; 26: call 0x76 // target = 0x9c ;; 2b: jump -0x7 // target = 0x24 diff --git a/tests/disas/riscv64-component-builtins-asm.wat b/tests/disas/riscv64-component-builtins-asm.wat index dd6a465899e7..b67cc9ff5daa 100644 --- a/tests/disas/riscv64-component-builtins-asm.wat +++ b/tests/disas/riscv64-component-builtins-asm.wat @@ -21,7 +21,7 @@ ;; mv s1, a1 ;; mv a3, a2 ;; ld a4, 0x10(a0) -;; mv a5, s0 +;; ld a5, 0(s0) ;; sd a5, 0x28(a4) ;; ld a5, 8(s0) ;; sd a5, 0x30(a4) diff --git a/tests/disas/riscv64-component-builtins.wat b/tests/disas/riscv64-component-builtins.wat index 4a1102fed04a..407777f1c80f 100644 --- a/tests/disas/riscv64-component-builtins.wat +++ b/tests/disas/riscv64-component-builtins.wat @@ -17,25 +17,26 @@ ;; block0(v0: i64, v1: i64, v2: i32): ;; v3 = load.i64 notrap aligned v0+16 ;; v4 = get_frame_pointer.i64 -;; store notrap aligned v4, v3+40 -;; v5 = get_return_address.i64 -;; store notrap aligned v5, v3+48 -;; v8 = load.i64 notrap aligned readonly v0+8 -;; v9 = load.i64 notrap aligned readonly v8+16 -;; v6 = iconst.i32 0 -;; v10 = call_indirect sig0, v9(v0, v6, v6, v2) ; v6 = 0, v6 = 0 -;; v11 = iconst.i64 -1 -;; v12 = icmp ne v10, v11 ; v11 = -1 -;; brif v12, block2, block1 +;; v5 = load.i64 notrap aligned v4 +;; store notrap aligned v5, v3+40 +;; v6 = get_return_address.i64 +;; store notrap aligned v6, v3+48 +;; v9 = load.i64 notrap aligned readonly v0+8 +;; v10 = load.i64 notrap aligned readonly v9+16 +;; v7 = iconst.i32 0 +;; v11 = call_indirect sig0, v10(v0, v7, v7, v2) ; v7 = 0, v7 = 0 +;; v12 = iconst.i64 -1 +;; v13 = icmp ne v11, v12 ; v12 = -1 +;; brif v13, block2, block1 ;; ;; block1 cold: -;; v13 = load.i64 notrap aligned readonly v1+16 -;; v14 = load.i64 notrap aligned readonly v13+408 -;; call_indirect sig1, v14(v1) +;; v14 = load.i64 notrap aligned readonly v1+16 +;; v15 = load.i64 notrap aligned readonly v14+408 +;; call_indirect sig1, v15(v1) ;; trap user1 ;; ;; block2: -;; brif.i64 v10, block3, block4 +;; brif.i64 v11, block3, 
block4 ;; ;; block3: ;; jump block4 diff --git a/tests/disas/trunc.wat b/tests/disas/trunc.wat index e6acff281e3d..a97bec5e0ed6 100644 --- a/tests/disas/trunc.wat +++ b/tests/disas/trunc.wat @@ -24,7 +24,7 @@ ;; jne 0x101 ;; 39: movq %r14, %rdi ;; movdqu (%rsp), %xmm0 -;; callq 0x245 +;; callq 0x247 ;; movabsq $13830554455654793216, %rax ;; movq %rax, %xmm6 ;; ucomisd %xmm0, %xmm6 @@ -55,27 +55,27 @@ ;; retq ;; d3: movl $6, %esi ;; d8: movq %r14, %rdi -;; db: callq 0x271 +;; db: callq 0x276 ;; e0: movq %r14, %rdi -;; e3: callq 0x2a1 +;; e3: callq 0x2a9 ;; e8: ud2 ;; ea: movl $6, %esi ;; ef: movq %r14, %rdi -;; f2: callq 0x271 +;; f2: callq 0x276 ;; f7: movq %r14, %rdi -;; fa: callq 0x2a1 +;; fa: callq 0x2a9 ;; ff: ud2 ;; 101: movl $8, %esi ;; 106: movq %r14, %rdi -;; 109: callq 0x271 +;; 109: callq 0x276 ;; 10e: movq %r14, %rdi -;; 111: callq 0x2a1 +;; 111: callq 0x2a9 ;; 116: ud2 ;; 118: xorl %esi, %esi ;; 11a: movq %r14, %rdi -;; 11d: callq 0x271 +;; 11d: callq 0x276 ;; 122: movq %r14, %rdi -;; 125: callq 0x2a1 +;; 125: callq 0x2a9 ;; 12a: ud2 ;; 12c: ud2 ;; 12e: ud2 diff --git a/tests/disas/trunc32.wat b/tests/disas/trunc32.wat index 8e5e2e8a631b..2b719dfe183c 100644 --- a/tests/disas/trunc32.wat +++ b/tests/disas/trunc32.wat @@ -26,7 +26,7 @@ ;; jp 0xf6 ;; jne 0xf6 ;; 46: movq %r12, %rdi -;; callq 0x243 +;; callq 0x245 ;; movabsq $13830554455654793216, %r8 ;; movq %r8, %xmm1 ;; ucomisd %xmm0, %xmm1 @@ -56,27 +56,27 @@ ;; retq ;; c8: movl $6, %esi ;; cd: movq %r12, %rdi -;; d0: callq 0x26f +;; d0: callq 0x274 ;; d5: movq %r12, %rdi -;; d8: callq 0x29f +;; d8: callq 0x2a7 ;; dd: ud2 ;; df: movl $6, %esi ;; e4: movq %r12, %rdi -;; e7: callq 0x26f +;; e7: callq 0x274 ;; ec: movq %r12, %rdi -;; ef: callq 0x29f +;; ef: callq 0x2a7 ;; f4: ud2 ;; f6: movl $8, %esi ;; fb: movq %r12, %rdi -;; fe: callq 0x26f +;; fe: callq 0x274 ;; 103: movq %r12, %rdi -;; 106: callq 0x29f +;; 106: callq 0x2a7 ;; 10b: ud2 ;; 10d: xorl %esi, %esi ;; 10f: movq %r12, %rdi -;; 112: callq 0x26f +;; 112: callq 0x274 ;; 117: movq %r12, %rdi -;; 11a: callq 0x29f +;; 11a: callq 0x2a7 ;; 11f: ud2 ;; 121: ud2 ;; 123: ud2 diff --git a/tests/disas/winch/aarch64/call_indirect/call_indirect.wat b/tests/disas/winch/aarch64/call_indirect/call_indirect.wat index 731043475f29..c5b0b27f7364 100644 --- a/tests/disas/winch/aarch64/call_indirect/call_indirect.wat +++ b/tests/disas/winch/aarch64/call_indirect/call_indirect.wat @@ -85,7 +85,7 @@ ;; mov x0, x9 ;; mov x1, #0 ;; ldur w2, [x28] -;; bl #0x3e4 +;; bl #0x3f0 ;; e0: add x28, x28, #4 ;; mov sp, x28 ;; ldur x9, [x28, #0x14] @@ -153,7 +153,7 @@ ;; mov x0, x9 ;; mov x1, #0 ;; ldur w2, [x28, #0xc] -;; bl #0x3e4 +;; bl #0x3f0 ;; 1f0: add x28, x28, #0xc ;; mov sp, x28 ;; add x28, x28, #4 diff --git a/tests/disas/winch/aarch64/call_indirect/local_arg.wat b/tests/disas/winch/aarch64/call_indirect/local_arg.wat index b91b748f9ff9..3d546753ca91 100644 --- a/tests/disas/winch/aarch64/call_indirect/local_arg.wat +++ b/tests/disas/winch/aarch64/call_indirect/local_arg.wat @@ -91,7 +91,7 @@ ;; mov x0, x9 ;; mov x1, #0 ;; ldur w2, [x28] -;; bl #0x404 +;; bl #0x3fc ;; 120: add x28, x28, #4 ;; mov sp, x28 ;; ldur x9, [x28, #0x14] diff --git a/tests/disas/winch/x64/atomic/notify/notify.wat b/tests/disas/winch/x64/atomic/notify/notify.wat index 4f8e4e0e3d87..ea9cd72baf24 100644 --- a/tests/disas/winch/x64/atomic/notify/notify.wat +++ b/tests/disas/winch/x64/atomic/notify/notify.wat @@ -27,7 +27,7 @@ ;; movl $0, %esi ;; movq 8(%rsp), %rdx ;; movl 4(%rsp), %ecx -;; callq 0x175 +;; callq 0x178 ;; addq 
$4, %rsp
;; addq $0xc, %rsp
;; movq 8(%rsp), %r14
diff --git a/tests/disas/winch/x64/atomic/notify/notify_offset.wat b/tests/disas/winch/x64/atomic/notify/notify_offset.wat
index 5f5fd6c8d477..118128338629 100644
--- a/tests/disas/winch/x64/atomic/notify/notify_offset.wat
+++ b/tests/disas/winch/x64/atomic/notify/notify_offset.wat
@@ -28,7 +28,7 @@
;; movl $0, %esi
;; movq 8(%rsp), %rdx
;; movl 4(%rsp), %ecx
-;; callq 0x17c
+;; callq 0x17f
;; addq $4, %rsp
;; addq $0xc, %rsp
;; movq 8(%rsp), %r14
diff --git a/tests/disas/winch/x64/atomic/wait/wait32.wat b/tests/disas/winch/x64/atomic/wait/wait32.wat
index 2d018f2f7020..370c737bffad 100644
--- a/tests/disas/winch/x64/atomic/wait/wait32.wat
+++ b/tests/disas/winch/x64/atomic/wait/wait32.wat
@@ -30,7 +30,7 @@
;; movq 0x18(%rsp), %rdx
;; movl 0x14(%rsp), %ecx
;; movq 0xc(%rsp), %r8
-;; callq 0x182
+;; callq 0x185
;; addq $0xc, %rsp
;; addq $0x14, %rsp
;; movq 8(%rsp), %r14
diff --git a/tests/disas/winch/x64/atomic/wait/wait32_offset.wat b/tests/disas/winch/x64/atomic/wait/wait32_offset.wat
index bca69bb45ea4..df22377fdbf7 100644
--- a/tests/disas/winch/x64/atomic/wait/wait32_offset.wat
+++ b/tests/disas/winch/x64/atomic/wait/wait32_offset.wat
@@ -34,7 +34,7 @@
;; movq 0x18(%rsp), %rdx
;; movl 0x14(%rsp), %ecx
;; movq 0xc(%rsp), %r8
-;; callq 0x189
+;; callq 0x18c
;; addq $0xc, %rsp
;; addq $0x14, %rsp
;; movq 8(%rsp), %r14
diff --git a/tests/disas/winch/x64/atomic/wait/wait64.wat b/tests/disas/winch/x64/atomic/wait/wait64.wat
index 92e9279f341d..81e3e669f958 100644
--- a/tests/disas/winch/x64/atomic/wait/wait64.wat
+++ b/tests/disas/winch/x64/atomic/wait/wait64.wat
@@ -29,7 +29,7 @@
;; movq 0x18(%rsp), %rdx
;; movq 0x10(%rsp), %rcx
;; movq 8(%rsp), %r8
-;; callq 0x17a
+;; callq 0x17d
;; addq $8, %rsp
;; addq $0x18, %rsp
;; movq 8(%rsp), %r14
diff --git a/tests/disas/winch/x64/atomic/wait/wait64_offset.wat b/tests/disas/winch/x64/atomic/wait/wait64_offset.wat
index 60278a7855fd..ccfc59b65eaf 100644
--- a/tests/disas/winch/x64/atomic/wait/wait64_offset.wat
+++ b/tests/disas/winch/x64/atomic/wait/wait64_offset.wat
@@ -33,7 +33,7 @@
;; movq 0x18(%rsp), %rdx
;; movq 0x10(%rsp), %rcx
;; movq 8(%rsp), %r8
-;; callq 0x181
+;; callq 0x184
;; addq $8, %rsp
;; addq $0x18, %rsp
;; movq 8(%rsp), %r14
diff --git a/tests/disas/winch/x64/call_indirect/call_indirect.wat b/tests/disas/winch/x64/call_indirect/call_indirect.wat
index d6ac1776bdeb..7ec91a542ab5 100644
--- a/tests/disas/winch/x64/call_indirect/call_indirect.wat
+++ b/tests/disas/winch/x64/call_indirect/call_indirect.wat
@@ -76,7 +76,7 @@
;; movq %r14, %rdi
;; movl $0, %esi
;; movl 8(%rsp), %edx
-;; callq 0x337
+;; callq 0x33e
;; addq $8, %rsp
;; addq $4, %rsp
;; movq 0x1c(%rsp), %r14
@@ -128,7 +128,7 @@
;; movq %r14, %rdi
;; movl $0, %esi
;; movl 4(%rsp), %edx
-;; callq 0x337
+;; callq 0x33e
;; addq $4, %rsp
;; addq $4, %rsp
;; movq 0x20(%rsp), %r14
diff --git a/tests/disas/winch/x64/call_indirect/local_arg.wat b/tests/disas/winch/x64/call_indirect/local_arg.wat
index a28a35c29d74..9925e43dd644 100644
--- a/tests/disas/winch/x64/call_indirect/local_arg.wat
+++ b/tests/disas/winch/x64/call_indirect/local_arg.wat
@@ -72,7 +72,7 @@
;; movq %r14, %rdi
;; movl $0, %esi
;; movl 8(%rsp), %edx
-;; callq 0x32b
+;; callq 0x327
;; addq $8, %rsp
;; addq $4, %rsp
;; movq 0x1c(%rsp), %r14
diff --git a/tests/disas/winch/x64/epoch/func.wat b/tests/disas/winch/x64/epoch/func.wat
index 87a203e4b34a..8d9197338527 100644
--- a/tests/disas/winch/x64/epoch/func.wat
+++ b/tests/disas/winch/x64/epoch/func.wat
@@ -23,7 +23,7 @@
;; cmpq %rcx, %rdx
;; jb 0x54
;; 47: movq %r14, %rdi
-;; callq 0x13b
+;; callq 0x13d
;; movq 8(%rsp), %r14
;; addq $0x10, %rsp
;; popq %rbp
diff --git a/tests/disas/winch/x64/epoch/loop.wat b/tests/disas/winch/x64/epoch/loop.wat
index 77ebfad29d5d..ee9c1baf770c 100644
--- a/tests/disas/winch/x64/epoch/loop.wat
+++ b/tests/disas/winch/x64/epoch/loop.wat
@@ -25,7 +25,7 @@
;; cmpq %rcx, %rdx
;; jb 0x54
;; 47: movq %r14, %rdi
-;; callq 0x165
+;; callq 0x167
;; movq 8(%rsp), %r14
;; movq 0x18(%r14), %rdx
;; movq (%rdx), %rdx
@@ -34,7 +34,7 @@
;; cmpq %rcx, %rdx
;; jb 0x79
;; 6c: movq %r14, %rdi
-;; callq 0x165
+;; callq 0x167
;; movq 8(%rsp), %r14
;; jmp 0x54
;; 7e: addq $0x10, %rsp
diff --git a/tests/disas/winch/x64/f32_ceil/f32_ceil_param.wat b/tests/disas/winch/x64/f32_ceil/f32_ceil_param.wat
index 69d565b6d88a..a1408fee0777 100644
--- a/tests/disas/winch/x64/f32_ceil/f32_ceil_param.wat
+++ b/tests/disas/winch/x64/f32_ceil/f32_ceil_param.wat
@@ -26,7 +26,7 @@
;; subq $0xc, %rsp
;; movq %r14, %rdi
;; movss 0xc(%rsp), %xmm0
-;; callq 0xdc
+;; callq 0xde
;; addq $0xc, %rsp
;; addq $4, %rsp
;; movq 0x18(%rsp), %r14
diff --git a/tests/disas/winch/x64/f32_floor/f32_floor_param.wat b/tests/disas/winch/x64/f32_floor/f32_floor_param.wat
index f9db481766f2..8f229d8dd115 100644
--- a/tests/disas/winch/x64/f32_floor/f32_floor_param.wat
+++ b/tests/disas/winch/x64/f32_floor/f32_floor_param.wat
@@ -26,7 +26,7 @@
;; subq $0xc, %rsp
;; movq %r14, %rdi
;; movss 0xc(%rsp), %xmm0
-;; callq 0xdc
+;; callq 0xde
;; addq $0xc, %rsp
;; addq $4, %rsp
;; movq 0x18(%rsp), %r14
diff --git a/tests/disas/winch/x64/f32_nearest/f32_nearest_param.wat b/tests/disas/winch/x64/f32_nearest/f32_nearest_param.wat
index adc7fa0af72c..531ae020f6e5 100644
--- a/tests/disas/winch/x64/f32_nearest/f32_nearest_param.wat
+++ b/tests/disas/winch/x64/f32_nearest/f32_nearest_param.wat
@@ -26,7 +26,7 @@
;; subq $0xc, %rsp
;; movq %r14, %rdi
;; movss 0xc(%rsp), %xmm0
-;; callq 0xdc
+;; callq 0xde
;; addq $0xc, %rsp
;; addq $4, %rsp
;; movq 0x18(%rsp), %r14
diff --git a/tests/disas/winch/x64/f32_trunc/f32_trunc_param.wat b/tests/disas/winch/x64/f32_trunc/f32_trunc_param.wat
index 9aebb3411a50..bf2b1a63a857 100644
--- a/tests/disas/winch/x64/f32_trunc/f32_trunc_param.wat
+++ b/tests/disas/winch/x64/f32_trunc/f32_trunc_param.wat
@@ -26,7 +26,7 @@
;; subq $0xc, %rsp
;; movq %r14, %rdi
;; movss 0xc(%rsp), %xmm0
-;; callq 0xdc
+;; callq 0xde
;; addq $0xc, %rsp
;; addq $4, %rsp
;; movq 0x18(%rsp), %r14
diff --git a/tests/disas/winch/x64/f64_ceil/f64_ceil_param.wat b/tests/disas/winch/x64/f64_ceil/f64_ceil_param.wat
index cdc1b87b18ec..79aeb031e0f5 100644
--- a/tests/disas/winch/x64/f64_ceil/f64_ceil_param.wat
+++ b/tests/disas/winch/x64/f64_ceil/f64_ceil_param.wat
@@ -26,7 +26,7 @@
;; subq $8, %rsp
;; movq %r14, %rdi
;; movsd 8(%rsp), %xmm0
-;; callq 0xdc
+;; callq 0xde
;; addq $8, %rsp
;; addq $8, %rsp
;; movq 0x18(%rsp), %r14
diff --git a/tests/disas/winch/x64/f64_floor/f64_floor_param.wat b/tests/disas/winch/x64/f64_floor/f64_floor_param.wat
index 2b4debf2fd5d..5465998a203c 100644
--- a/tests/disas/winch/x64/f64_floor/f64_floor_param.wat
+++ b/tests/disas/winch/x64/f64_floor/f64_floor_param.wat
@@ -26,7 +26,7 @@
;; subq $8, %rsp
;; movq %r14, %rdi
;; movsd 8(%rsp), %xmm0
-;; callq 0xdc
+;; callq 0xde
;; addq $8, %rsp
;; addq $8, %rsp
;; movq 0x18(%rsp), %r14
diff --git a/tests/disas/winch/x64/f64_nearest/f64_nearest_param.wat b/tests/disas/winch/x64/f64_nearest/f64_nearest_param.wat
index e78d9a01fe34..ca3cfe650e33 100644
--- a/tests/disas/winch/x64/f64_nearest/f64_nearest_param.wat
+++ b/tests/disas/winch/x64/f64_nearest/f64_nearest_param.wat
@@ -26,7 +26,7 @@
;; subq $8, %rsp
;; movq %r14, %rdi
;; movsd 8(%rsp), %xmm0
-;; callq 0xdc
+;; callq 0xde
;; addq $8, %rsp
;; addq $8, %rsp
;; movq 0x18(%rsp), %r14
diff --git a/tests/disas/winch/x64/f64_trunc/f64_trunc_param.wat b/tests/disas/winch/x64/f64_trunc/f64_trunc_param.wat
index 138fff26475a..093fc46dbe56 100644
--- a/tests/disas/winch/x64/f64_trunc/f64_trunc_param.wat
+++ b/tests/disas/winch/x64/f64_trunc/f64_trunc_param.wat
@@ -26,7 +26,7 @@
;; subq $8, %rsp
;; movq %r14, %rdi
;; movsd 8(%rsp), %xmm0
-;; callq 0xdc
+;; callq 0xde
;; addq $8, %rsp
;; addq $8, %rsp
;; movq 0x18(%rsp), %r14
diff --git a/tests/disas/winch/x64/fuel/call.wat b/tests/disas/winch/x64/fuel/call.wat
index efd069f7115c..5f554bb0dec1 100644
--- a/tests/disas/winch/x64/fuel/call.wat
+++ b/tests/disas/winch/x64/fuel/call.wat
@@ -28,7 +28,7 @@
;; cmpq $0, %rcx
;; jl 0x5e
;; 51: movq %r14, %rdi
-;; callq 0x1f5
+;; callq 0x1f7
;; movq 8(%rsp), %r14
;; movq 8(%r14), %rax
;; movq (%rax), %r11
@@ -74,7 +74,7 @@
;; cmpq $0, %rcx
;; jl 0x10e
;; 101: movq %r14, %rdi
-;; callq 0x1f5
+;; callq 0x1f7
;; movq 8(%rsp), %r14
;; addq $0x10, %rsp
;; popq %rbp
diff --git a/tests/disas/winch/x64/fuel/func.wat b/tests/disas/winch/x64/fuel/func.wat
index 14ab5b247150..bef9b6adf2c0 100644
--- a/tests/disas/winch/x64/fuel/func.wat
+++ b/tests/disas/winch/x64/fuel/func.wat
@@ -24,7 +24,7 @@
;; cmpq $0, %rcx
;; jl 0x5e
;; 51: movq %r14, %rdi
-;; callq 0x145
+;; callq 0x147
;; movq 8(%rsp), %r14
;; addq $0x10, %rsp
;; popq %rbp
diff --git a/tests/disas/winch/x64/fuel/loop.wat b/tests/disas/winch/x64/fuel/loop.wat
index 3487f1061f1a..3c29e8815dd4 100644
--- a/tests/disas/winch/x64/fuel/loop.wat
+++ b/tests/disas/winch/x64/fuel/loop.wat
@@ -26,14 +26,14 @@
;; cmpq $0, %rcx
;; jl 0x5e
;; 51: movq %r14, %rdi
-;; callq 0x179
+;; callq 0x17b
;; movq 8(%rsp), %r14
;; movq 8(%r14), %rcx
;; movq (%rcx), %rcx
;; cmpq $0, %rcx
;; jl 0x7c
;; 6f: movq %r14, %rdi
-;; callq 0x179
+;; callq 0x17b
;; movq 8(%rsp), %r14
;; movq 8(%r14), %rax
;; movq (%rax), %r11
diff --git a/tests/disas/winch/x64/load/grow_load.wat b/tests/disas/winch/x64/load/grow_load.wat
index f3ed3d2d7836..206968c2d1f1 100644
--- a/tests/disas/winch/x64/load/grow_load.wat
+++ b/tests/disas/winch/x64/load/grow_load.wat
@@ -65,7 +65,7 @@
;; movq %r14, %rdi
;; movl 0xc(%rsp), %esi
;; movl $0, %edx
-;; callq 0x2e2
+;; callq 0x2e5
;; addq $0xc, %rsp
;; addq $4, %rsp
;; movq 0x58(%rsp), %r14
diff --git a/tests/disas/winch/x64/table/fill.wat b/tests/disas/winch/x64/table/fill.wat
index a58d41451a60..bd663b63e6ea 100644
--- a/tests/disas/winch/x64/table/fill.wat
+++ b/tests/disas/winch/x64/table/fill.wat
@@ -113,7 +113,7 @@
;; movq %r14, %rdi
;; movl $0, %esi
;; movl 0xc(%rsp), %edx
-;; callq 0x4ee
+;; callq 0x4f7
;; addq $0xc, %rsp
;; addq $4, %rsp
;; movq 0x28(%rsp), %r14
@@ -133,7 +133,7 @@
;; movl 0xc(%rsp), %edx
;; movq 4(%rsp), %rcx
;; movl (%rsp), %r8d
-;; callq 0x51a
+;; callq 0x526
;; addq $0x10, %rsp
;; movq 0x28(%rsp), %r14
;; addq $0x30, %rsp
diff --git a/tests/disas/winch/x64/table/get.wat b/tests/disas/winch/x64/table/get.wat
index 03febe2ed81d..f47517eb7999 100644
--- a/tests/disas/winch/x64/table/get.wat
+++ b/tests/disas/winch/x64/table/get.wat
@@ -65,7 +65,7 @@
;; movq %r14, %rdi
;; movl $0, %esi
;; movl 0xc(%rsp), %edx
-;; callq 0x2ef
+;; callq 0x2f8
;; addq $0xc, %rsp
;; addq $4, %rsp
;; movq 0x18(%rsp), %r14
diff --git a/tests/disas/winch/x64/table/grow.wat b/tests/disas/winch/x64/table/grow.wat
index a6dfa4e1a530..86be26a28c6a 100644
--- a/tests/disas/winch/x64/table/grow.wat
+++ b/tests/disas/winch/x64/table/grow.wat
@@ -30,7 +30,7 @@
;; movl $0, %esi
;; movl $0xa, %edx
;; movq 8(%rsp), %rcx
-;; callq 0x178
+;; callq 0x180
;; addq $8, %rsp
;; addq $8, %rsp
;; movq 0x18(%rsp), %r14
diff --git a/tests/disas/winch/x64/table/init_copy_drop.wat b/tests/disas/winch/x64/table/init_copy_drop.wat
index dab3c0e6ac37..f4fed0127c88 100644
--- a/tests/disas/winch/x64/table/init_copy_drop.wat
+++ b/tests/disas/winch/x64/table/init_copy_drop.wat
@@ -142,11 +142,11 @@
;; movl $7, %ecx
;; movl $0, %r8d
;; movl $4, %r9d
-;; callq 0x94a
+;; callq 0x95a
;; movq 8(%rsp), %r14
;; movq %r14, %rdi
;; movl $1, %esi
-;; callq 0x995
+;; callq 0x9a9
;; movq 8(%rsp), %r14
;; movq %r14, %rdi
;; movl $0, %esi
@@ -154,11 +154,11 @@
;; movl $0xf, %ecx
;; movl $1, %r8d
;; movl $3, %r9d
-;; callq 0x94a
+;; callq 0x95a
;; movq 8(%rsp), %r14
;; movq %r14, %rdi
;; movl $3, %esi
-;; callq 0x995
+;; callq 0x9a9
;; movq 8(%rsp), %r14
;; movq %r14, %rdi
;; movl $0, %esi
@@ -166,7 +166,7 @@
;; movl $0x14, %ecx
;; movl $0xf, %r8d
;; movl $5, %r9d
-;; callq 0x8ff
+;; callq 0x90b
;; movq 8(%rsp), %r14
;; movq %r14, %rdi
;; movl $0, %esi
@@ -174,7 +174,7 @@
;; movl $0x15, %ecx
;; movl $0x1d, %r8d
;; movl $1, %r9d
-;; callq 0x8ff
+;; callq 0x90b
;; movq 8(%rsp), %r14
;; movq %r14, %rdi
;; movl $0, %esi
@@ -182,7 +182,7 @@
;; movl $0x18, %ecx
;; movl $0xa, %r8d
;; movl $1, %r9d
-;; callq 0x8ff
+;; callq 0x90b
;; movq 8(%rsp), %r14
;; movq %r14, %rdi
;; movl $0, %esi
@@ -190,7 +190,7 @@
;; movl $0xd, %ecx
;; movl $0xb, %r8d
;; movl $4, %r9d
-;; callq 0x8ff
+;; callq 0x90b
;; movq 8(%rsp), %r14
;; movq %r14, %rdi
;; movl $0, %esi
@@ -198,7 +198,7 @@
;; movl $0x13, %ecx
;; movl $0x14, %r8d
;; movl $5, %r9d
-;; callq 0x8ff
+;; callq 0x90b
;; movq 8(%rsp), %r14
;; addq $0x10, %rsp
;; popq %rbp
@@ -243,7 +243,7 @@
;; movq %r14, %rdi
;; movl $0, %esi
;; movl 0xc(%rsp), %edx
-;; callq 0x9c0
+;; callq 0x9d7
;; addq $0xc, %rsp
;; addq $4, %rsp
;; movq 0x18(%rsp), %r14
diff --git a/tests/disas/winch/x64/table/set.wat b/tests/disas/winch/x64/table/set.wat
index 2ae1255aee14..9252b525f030 100644
--- a/tests/disas/winch/x64/table/set.wat
+++ b/tests/disas/winch/x64/table/set.wat
@@ -109,7 +109,7 @@
;; movq %r14, %rdi
;; movl $0, %esi
;; movl 8(%rsp), %edx
-;; callq 0x4ba
+;; callq 0x4c0
;; addq $8, %rsp
;; addq $4, %rsp
;; movq 0x1c(%rsp), %r14