Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions cranelift/codegen/src/inst_predicates.rs
Original file line number Diff line number Diff line change
Expand Up @@ -150,6 +150,12 @@ pub fn has_memory_fence_semantics(op: Opcode) -> bool {
| Opcode::Debugtrap
| Opcode::SequencePoint => true,
Opcode::Call | Opcode::CallIndirect | Opcode::TryCall | Opcode::TryCallIndirect => true,
// N.B.: this is *load-bearing for borrow safety and
// provenance in Wasmtime*. A trapping op can potentially
// cause an implicit hostcall, and that hostcall implicitly
// mutably borrows Wasmtime's Store. So we can't allow alias
// analysis to cross trapping opcodes; they are implicitly
// as-if they called the host.
op if op.can_trap() => true,
_ => false,
}
Expand Down
26 changes: 19 additions & 7 deletions crates/cranelift/src/bounds_checks.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
use crate::{
Reachability,
func_environ::FuncEnvironment,
translate::{HeapData, TargetEnvironment},
translate::{FuncTranslationStacks, HeapData, TargetEnvironment},
};
use Reachability::*;
use cranelift_codegen::{
Expand Down Expand Up @@ -84,12 +84,15 @@ pub fn bounds_check_and_compute_addr(
index: ir::Value,
bounds_check: BoundsCheck,
trap: ir::TrapCode,
stacks: &FuncTranslationStacks,
) -> Reachability<ir::Value> {
match bounds_check {
BoundsCheck::StaticOffset {
offset,
access_size,
} => bounds_check_field_access(builder, env, heap, index, offset, access_size, trap),
} => {
bounds_check_field_access(builder, env, heap, index, offset, access_size, trap, stacks)
}

#[cfg(feature = "gc")]
BoundsCheck::StaticObjectField {
Expand All @@ -113,6 +116,7 @@ pub fn bounds_check_and_compute_addr(
0,
object_size,
trap,
stacks,
) {
Reachable(v) => v,
u @ Unreachable => return u,
Expand All @@ -123,7 +127,7 @@ pub fn bounds_check_and_compute_addr(
}

// Otherwise, bounds check just this one field's access.
bounds_check_field_access(builder, env, heap, index, offset, access_size, trap)
bounds_check_field_access(builder, env, heap, index, offset, access_size, trap, stacks)
}

// Compute the index of the end of the object, bounds check that and get
Expand All @@ -148,6 +152,7 @@ pub fn bounds_check_and_compute_addr(
0,
0,
trap,
stacks,
) {
Reachable(v) => v,
u @ Unreachable => return u,
Expand Down Expand Up @@ -177,6 +182,7 @@ fn bounds_check_field_access(
offset: u32,
access_size: u8,
trap: ir::TrapCode,
stacks: &FuncTranslationStacks,
) -> Reachability<ir::Value> {
let pointer_bit_width = u16::try_from(env.pointer_type().bits()).unwrap();
let bound_gv = heap.bound;
Expand Down Expand Up @@ -298,7 +304,7 @@ fn bounds_check_field_access(
// max_memory_size`, since we will end up being out-of-bounds regardless
// of the given `index`.
env.before_unconditionally_trapping_memory_access(builder);
env.trap(builder, trap);
env.trap(builder, trap, stacks);
return Unreachable;
}

Expand All @@ -308,7 +314,7 @@ fn bounds_check_field_access(
// native pointer type anyway, so this is an unconditional trap.
if pointer_bit_width < 64 && offset_and_size >= (1 << pointer_bit_width) {
env.before_unconditionally_trapping_memory_access(builder);
env.trap(builder, trap);
env.trap(builder, trap, stacks);
return Unreachable;
}

Expand Down Expand Up @@ -430,6 +436,7 @@ fn bounds_check_field_access(
AddrPcc::static32(heap.pcc_memory_type, memory_reservation),
oob,
trap,
stacks,
));
}

Expand Down Expand Up @@ -464,6 +471,7 @@ fn bounds_check_field_access(
AddrPcc::dynamic(heap.pcc_memory_type, bound_gv),
oob,
trap,
stacks,
));
}

Expand Down Expand Up @@ -513,6 +521,7 @@ fn bounds_check_field_access(
AddrPcc::dynamic(heap.pcc_memory_type, bound_gv),
oob,
trap,
stacks,
));
}

Expand Down Expand Up @@ -558,6 +567,7 @@ fn bounds_check_field_access(
AddrPcc::dynamic(heap.pcc_memory_type, bound_gv),
oob,
trap,
stacks,
));
}

Expand All @@ -575,7 +585,7 @@ fn bounds_check_field_access(
builder.func.dfg.facts[access_size_val] =
Some(Fact::constant(pointer_bit_width, offset_and_size));
}
let adjusted_index = env.uadd_overflow_trap(builder, index, access_size_val, trap);
let adjusted_index = env.uadd_overflow_trap(builder, index, access_size_val, trap, stacks);
if pcc {
builder.func.dfg.facts[adjusted_index] = Some(Fact::value_offset(
pointer_bit_width,
Expand Down Expand Up @@ -603,6 +613,7 @@ fn bounds_check_field_access(
AddrPcc::dynamic(heap.pcc_memory_type, bound_gv),
oob,
trap,
stacks,
))
}

Expand Down Expand Up @@ -756,9 +767,10 @@ fn explicit_check_oob_condition_and_compute_addr(
// in bounds (and therefore we can proceed).
oob_condition: ir::Value,
trap: ir::TrapCode,
stacks: &FuncTranslationStacks,
) -> ir::Value {
if let OobBehavior::ExplicitTrap = oob_behavior {
env.trapnz(builder, oob_condition, trap);
env.trapnz(builder, oob_condition, trap, stacks);
}
let addr_ty = env.pointer_type();

Expand Down
12 changes: 7 additions & 5 deletions crates/cranelift/src/compiler.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1620,15 +1620,17 @@ fn save_last_wasm_exit_fp_and_pc(
ptr: &impl PtrSize,
limits: Value,
) {
// Save the trampoline FP to the limits. Exception unwind needs
// this so that it can know the SP (bottom of frame) for the very
// last Wasm frame.
// Save the Wasm frame exit FP to the limits. We have the
// trampoline FP here; load the next FP in the chain.
let trampoline_fp = builder.ins().get_frame_pointer(pointer_type);
let wasm_fp = builder
.ins()
.load(pointer_type, MemFlags::trusted(), trampoline_fp, 0);
builder.ins().store(
MemFlags::trusted(),
trampoline_fp,
wasm_fp,
limits,
ptr.vmstore_context_last_wasm_exit_trampoline_fp(),
ptr.vmstore_context_last_wasm_exit_fp(),
);

// Finally save the Wasm return address to the limits.
Expand Down
Loading