Open
Description
This Rust program (which runs fine under Miri) produces different results in GCC debug mode (same result as LLVM) and GCC release mode (incorrect result).
The sample is currently ~140 LOC; I am working on reducing it further.
#![recursion_limit = "1024"]
#![feature(custom_mir, core_intrinsics, lazy_get)]
#![allow(unused_parens, unused_assignments, overflowing_literals)]
extern crate core;
use core::intrinsics::mir::*;
use std::fmt::Debug;
/// Writes a four-variable debug dump for function number `f` to stdout.
///
/// Emits exactly the same bytes as a single
/// `println!("fn{f}:_{var0} = {val0:?}\n_{var1} = {val1:?}\n...")` call:
/// the `fn{f}:` header line followed by one `_{var} = {val:?}` line per pair.
///
/// `#[inline(never)]` keeps the call out-of-line so the printed values
/// cannot be folded away at the call site (this file is a codegen repro).
#[inline(never)]
fn dump_var(
    f: usize,
    var0: usize,
    val0: impl Debug,
    var1: usize,
    val1: impl Debug,
    var2: usize,
    val2: impl Debug,
    var3: usize,
    val3: impl Debug,
) {
    // Build the message incrementally; the resulting output is
    // byte-identical to the original single-format-string version.
    let mut msg = format!("fn{f}:_{var0} = {val0:?}");
    msg.push_str(&format!("\n_{var1} = {val1:?}"));
    msg.push_str(&format!("\n_{var2} = {val2:?}"));
    msg.push_str(&format!("\n_{var3} = {val3:?}"));
    println!("{msg}");
}
// Fuzzer-generated function with MIR-style locals (`_1`..`_17`, `RET`).
// Every statement and its exact order is part of the miscompilation
// reproducer; do not simplify or reorder.
// Called once from `main` with `_4 == _5 == _9 == 1705677637`.
pub fn fn2(
mut _1: *mut isize,
mut _2: i16,
mut _3: i32,
mut _4: u32,
mut _5: u32,
mut _6: isize,
mut _7: i8,
mut _8: isize,
mut _9: u32,
mut _10: bool,
) -> isize {
let mut RET: isize = Default::default();
let mut _11: u16 = Default::default();
let mut _12: (f32, (), u64) = Default::default();
let mut _15: u64 = Default::default();
let mut _16: f64 = Default::default();
let mut _17: (Adt38, *mut i8, f32, u16) = (Default::default(), core::ptr::null_mut(), 0.0, 0);
let mut _33: () = Default::default();
// Straight-line scrambling of the locals before entering the loop.
_4 = !_9;
_1 = core::ptr::addr_of_mut!(_8);
_9 = _4;
_11 = 50919;
RET = !_8;
_10 = false;
_4 = _5;
_12.0 = 0.0;
_7 = (-5965270870520108484_i64) as i8;
// Infinite loop; the only exits are the two `return RET` arms below.
loop {
_12.2 = (-164714684559875379919518575010379717139_i128) as u64;
// SAFETY: before any dereference, `_1` is reassigned via `addr_of_mut!`
// to local `_8` (and later `_6`), so every `(*_1)` access targets a
// live local of this frame.
unsafe {
_17.2 = 0.0;
_17.0.fld0 = _10;
_17.3 = !_11;
(*_1) = -RET;
_1 = core::ptr::addr_of_mut!(_6);
_4 = !_9;
_17.1 = core::ptr::addr_of_mut!(_7);
_15 = _12.2 << _8;
_16 = (-3256106608463376368_i64) as f64;
_8 = RET >> _3;
// NOTE(review): per the report, GCC release builds execute a "decoy"
// block that should be unreachable; which arm is affected cannot be
// determined from this source alone.
match _5 {
0 => {
_9 = _4;
(*_1) = _6;
_11 = '\u{bc6e7}' as u16;
RET = !(*_1);
(*_1) = -RET;
_10 = false;
(*_1) = _6 | RET;
_12.2 = (-164714684559875379919518575010379717139_i128) as u64;
continue;
}
1705677637 => {
(*_1) = 1;
(*_1) = 0;
_17.2 = _12.0 + _12.0;
_12.0 = _17.2 * _17.2;
_3 = (-2040533717_i32) | 1920924768_i32;
_11 = _17.3;
_1 = core::ptr::addr_of_mut!(_6);
_17.0.fld1 = _11 as u8;
_6 = _8;
_6 = 112574652318580291556652859739937967926_i128 as isize;
_6 = !_8;
_6 = _8;
_6 = _8 << _17.0.fld1;
_6 = _8 + RET;
_6 = fn3(_17.0, (_17));
}
_ => return RET,
}
};
// Reached only by falling out of the `1705677637` arm above
// (the `0` arm continues, the wildcard arm returns).
_4 = _5;
_5 = _5 & _9;
_9 = _5 & _9;
_17.0.fld0 = !_10;
_17.2 = 0.0;
_17.1 = core::ptr::addr_of_mut!(_7);
_3 = core::hint::black_box(-1922871724_i32) * 995968542_i32;
_17.2 = _11 as f32;
match _4 {
1705677637 => return RET,
_ => continue,
}
}
}
// Out-of-line callee for fn2's `1705677637` arm. Computes `!arg1.fld1`
// (bit-flipped u8, zero-extended to isize), repoints `arg2.1` at a local
// `i8` (the pointer is not read again here), prints via `dump_var`, and
// always returns 1. Kept non-inline so the call survives optimization.
#[inline(never)]
pub fn fn3(mut arg1: Adt38, mut arg2: (Adt38, *mut i8, f32, u16)) -> isize {
let mut tmp: (i64, i8) = Default::default();
let _33 = !(arg1.fld1) as isize;
arg2.1 = core::ptr::addr_of_mut!(tmp.1);
// presumably 44 is the generator's placeholder index for unit-valued
// slots — verify against the fuzzer if it matters.
dump_var(3_usize, 44_usize, (), 44_usize, (), 33_usize, (_33), 44_usize, ());
return 1;
}
// Driver: builds concrete argument values and calls fn2 exactly once.
pub fn main() {
// `char as isize` yields the scalar value: 0x9f647 == 652871.
let mut _13 = '\u{9f647}' as isize;
let _1 = core::ptr::addr_of_mut!(_13);
let a = true;
let _2 = '\u{f2e32}' as i16;
// 1705677637 is the literal matched by the arms inside fn2; it is
// passed as _4, _5, and _9 below.
let _3 = 1705677637_u32;
// !(-69) == 68 in two's complement.
let _11 = !(-69_isize);
let _6 = (-579835010_i32) & (-913890743_i32);
let _4 = _2 as i8;
println!("{:?}", _11);
fn2(_1, _2, _6, _3, _3, _11, _4, _11, _3, a);
}
// Plain two-field aggregate used by fn2/fn3; fn2 writes `fld0`/`fld1`
// through tuple element `_17.0`, fn3 reads `fld1`. The derived `Default`
// yields `fld0 == false`, `fld1 == 0`. Field order is part of the repro.
#[derive(Debug, Copy, Clone, Default)]
pub struct Adt38 {
fld0: bool,
fld1: u8,
}
Some interesting observations: the problem seems to be related to pointers, and the control flow differs in release GCC (a "decoy" block gets executed even though it should never be reached).