diff --git a/lib/compiler/aro_translate_c/ast.zig b/lib/compiler/aro_translate_c/ast.zig index f23be5b93df1..e5660a36d9b3 100644 --- a/lib/compiler/aro_translate_c/ast.zig +++ b/lib/compiler/aro_translate_c/ast.zig @@ -192,6 +192,8 @@ pub const Node = extern union { array_type, null_sentinel_array_type, + /// @import("std").zig.c_translation.Volatile(operand) + helpers_volatile, /// @import("std").zig.c_translation.sizeof(operand) helpers_sizeof, /// @import("std").zig.c_translation.FlexibleArrayType(lhs, rhs) @@ -286,6 +288,7 @@ pub const Node = extern union { .const_cast, .volatile_cast, .vector_zero_init, + .helpers_volatile, => Payload.UnOp, .add, @@ -935,6 +938,11 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { const import_node = try renderStdImport(c, &.{ "zig", "c_translation", "shuffleVectorIndex" }); return renderCall(c, import_node, &.{ payload.lhs, payload.rhs }); }, + .helpers_volatile => { + const payload = node.castTag(.helpers_volatile).?.data; + const import_node = try renderStdImport(c, &.{ "zig", "c_translation", "Volatile" }); + return renderCall(c, import_node, &.{payload}); + }, .vector => { const payload = node.castTag(.vector).?.data; return renderBuiltinCall(c, "@Vector", &.{ payload.lhs, payload.rhs }); @@ -2356,6 +2364,7 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { .helpers_promoteIntLiteral, .helpers_shuffle_vector_index, .helpers_flexible_array_type, + .helpers_volatile, .std_mem_zeroinit, .integer_literal, .float_literal, diff --git a/lib/std/zig/c_translation.zig b/lib/std/zig/c_translation.zig index 813b409c903c..5de58eaeb014 100644 --- a/lib/std/zig/c_translation.zig +++ b/lib/std/zig/c_translation.zig @@ -366,6 +366,42 @@ test "Flexible Array Type" { try testing.expectEqual(FlexibleArrayType(*const volatile Container, c_int), [*c]const volatile c_int); } +pub fn Volatile(comptime T: type) type { + return extern struct { + inner: T = std.mem.zeroes(T), + + pub inline fn ptr(v: *volatile Volatile(T)) *volatile T { + return @ptrCast(v); + } + + pub inline fn constPtr(v: *const volatile Volatile(T)) *const volatile T { + return @ptrCast(v); + } + + pub inline fn load(v: *const volatile Volatile(T)) T { + return v.constPtr().*; + } + + pub inline fn store(v: *volatile Volatile(T), value: T) void { + v.ptr().* = value; + } + }; +} + +test "Volatile" { + try testing.expectEqual(@sizeOf(Volatile(c_int)), @sizeOf(c_int)); + try testing.expectEqual(@alignOf(Volatile(c_int)), @alignOf(c_int)); + + try testing.expectEqual(@sizeOf([7]Volatile(c_int)), @sizeOf([7]c_int)); + try testing.expectEqual(@alignOf([7]Volatile(c_int)), @alignOf([7]c_int)); + + try testing.expectEqual(@sizeOf(Volatile(u32)), @sizeOf(u32)); + try testing.expectEqual(@alignOf(Volatile(u32)), @alignOf(u32)); + + try testing.expectEqual(@sizeOf(Volatile(u64)), @sizeOf(u64)); + try testing.expectEqual(@alignOf(Volatile(u64)), @alignOf(u64)); +} + /// C `%` operator for signed integers /// C standard states: "If the quotient a/b is representable, the expression (a/b)*b + a%b shall equal a" /// The quotient is not representable if denominator is zero, or if numerator is the minimum integer for diff --git a/src/clang.zig b/src/clang.zig index c12daca2bce5..c8355df5cff4 100644 --- a/src/clang.zig +++ b/src/clang.zig @@ -32,6 +32,9 @@ pub const QualType = extern struct { pub const isVolatileQualified = ZigClangQualType_isVolatileQualified; extern fn ZigClangQualType_isVolatileQualified(QualType) bool; + pub const isLocalVolatileQualified = 
ZigClangQualType_isLocalVolatileQualified;
+    extern fn ZigClangQualType_isLocalVolatileQualified(QualType) bool;
+
     pub const isRestrictQualified = ZigClangQualType_isRestrictQualified;
     extern fn ZigClangQualType_isRestrictQualified(QualType) bool;
 };
diff --git a/src/translate_c.zig b/src/translate_c.zig
index 19a2fde13cee..006c7d8ccc1c 100644
--- a/src/translate_c.zig
+++ b/src/translate_c.zig
@@ -697,10 +697,10 @@ fn transTypeDef(c: *Context, scope: *Scope, typedef_decl: *const clang.TypedefNa
     if (builtin_typedef_map.get(name)) |builtin| {
         return c.decl_table.putNoClobber(c.gpa, @intFromPtr(typedef_decl.getCanonicalDecl()), builtin);
     }
+    const child_qt = typedef_decl.getUnderlyingType();
     if (!toplevel) name = try bs.makeMangledName(c, name);
     try c.decl_table.putNoClobber(c.gpa, @intFromPtr(typedef_decl.getCanonicalDecl()), name);
-    const child_qt = typedef_decl.getUnderlyingType();
     const typedef_loc = typedef_decl.getLocation();
     const init_node = transQualType(c, scope, child_qt, typedef_loc) catch |err| switch (err) {
         error.UnsupportedType => {
@@ -3379,6 +3379,186 @@ fn transStmtExpr(c: *Context, scope: *Scope, stmt: *const clang.StmtExpr, used:
     return maybeSuppressResult(c, used, res);
 }
 
+fn restoreCVQualifiers(
+    c: *Context,
+    scope: *Scope,
+    pointee_ty: clang.QualType,
+    ptr_node: Node,
+    source_location: clang.SourceLocation,
+) TransError!Node {
+    const pointer_ty = try Tag.c_pointer.create(c.arena, .{
+        .is_const = pointee_ty.isConstQualified(),
+        .is_volatile = pointee_ty.isVolatileQualified(),
+        .elem_type = try transType(c, scope, pointee_ty.getTypePtr(), source_location),
+    });
+    return try Tag.as.create(c.arena, .{
+        .lhs = pointer_ty,
+        .rhs = try Tag.ptr_cast.create(c.arena, ptr_node),
+    });
+}
+
+/// Check if this type is a reference to a typedef to a volatile-qualified value-type.
+/// If so, its zig type is `std.zig.c_translation.Volatile(T)`.
+fn isVolatileTypedef(ty: clang.QualType) bool {
+    if (!ty.isVolatileQualified()) {
+        return false;
+    }
+    var base_ty: clang.QualType = ty;
+    while (base_ty.getTypeClass() == .Elaborated) {
+        const elaborated = @as(*const clang.ElaboratedType, @ptrCast(base_ty.getTypePtr()));
+        base_ty = elaborated.getNamedType();
+    }
+
+    if (base_ty.getTypeClass() == .Typedef) {
+        const typedef_ty = @as(*const clang.TypedefType, @ptrCast(base_ty.getTypePtr()));
+        const underlying_ty = typedef_ty.getDecl().getUnderlyingType();
+        return underlying_ty.isVolatileQualified();
+    }
+    return false;
+}
+
+/// Checks if the expression in `node` is a dereference added by translate-c to maintain type qualifiers.
+/// Intended to be used when taking the address of an expression, to check if the sub-expression has
+/// a redundant dereference that did not appear in the original C source and can simply be removed
+/// to yield the 'address-of' result.
+fn maybeCollapseAddressOf(c: *Context, expr: *const clang.Expr, expr_ty: clang.QualType, node: Node) ?Node {
+    // TODO(theofabilous): should we even be doing this?
+    // TODO(theofabilous): obviously suboptimal approach. Is there a way to achieve this in a bottom-up
+    // manner without modifying too many call sites? This is very ad-hoc, and modifying a lot of code
+    // just for this seems counter-intuitive.
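+    // Example of the pattern being collapsed (from test/cases/translate_c/cv_qualified_value.c):
+    // the field access `hw->reg` is rendered as `hw.*.reg.ptr().*`, so when the C source takes
+    // `&hw->reg`, the trailing `.*` was only introduced by translate-c and can be dropped,
+    // leaving `hw.*.reg.ptr()`; the caller then re-applies the pointer qualifiers.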
+    const ptr: *ast.Payload.UnOp = node.castTag(.deref) orelse return null;
+    var subexpr = expr;
+    var stmt_class = expr.getStmtClass();
+    sw: switch (stmt_class) {
+        .UnaryOperatorClass => {
+            // Don't collapse the dereference if there was an actual
+            // dereference in the source code
+            const un_op = @as(*const clang.UnaryOperator, @ptrCast(subexpr));
+            const opcode = un_op.getOpcode();
+            if (opcode == .Deref) return null;
+        },
+        .ParenExprClass => {
+            const paren_expr = @as(*const clang.ParenExpr, @ptrCast(subexpr));
+            subexpr = paren_expr.getSubExpr();
+            stmt_class = subexpr.getStmtClass();
+            continue :sw stmt_class;
+        },
+        else => {},
+    }
+    const ptr_node = ptr.data;
+    switch (ptr_node.tag()) {
+        .call => {
+            // x.ptr().*
+            // x.constPtr().*
+            if (!isVolatileTypedef(expr_ty)) {
+                return null;
+            }
+            const call_pl: *ast.Payload.Call = ptr_node.castTag(.call).?;
+            if (call_pl.data.args.len > 0) {
+                return null;
+            }
+            const func_pl: *ast.Payload.FieldAccess = call_pl.data.lhs.castTag(.field_access) orelse return null;
+            const is_const_ptr = is_const_ptr: {
+                if (std.mem.eql(u8, func_pl.data.field_name, "constPtr")) {
+                    break :is_const_ptr true;
+                } else if (std.mem.eql(u8, func_pl.data.field_name, "ptr")) {
+                    break :is_const_ptr false;
+                } else return null;
+            };
+            _ = is_const_ptr;
+            // TODO(theofabilous): is it actually ok to destroy here?
+            defer c.arena.destroy(@as(*ast.Node.Tag.deref.Type(), @ptrCast(@alignCast(node.ptr_otherwise))));
+            return ptr.data;
+        },
+        .as => {
+            // @as(..., @ptrCast(...)).*
+            if (!expr_ty.isVolatileQualified() and !expr_ty.isConstQualified()) {
+                return null;
+            }
+            const as_pl: *ast.Payload.BinOp = ptr_node.castTag(.as).?;
+            const value_operand = as_pl.data.rhs;
+            const ptr_cast_pl: *ast.Payload.UnOp = value_operand.castTag(.ptr_cast) orelse return null;
+            // TODO(theofabilous): is it actually ok to destroy here?
+            defer c.arena.destroy(@as(*ast.Node.Tag.as.Type(), @ptrCast(@alignCast(ptr_node.ptr_otherwise))));
+            defer c.arena.destroy(@as(*ast.Node.Tag.deref.Type(), @ptrCast(@alignCast(node.ptr_otherwise))));
+            return ptr_cast_pl.data;
+        },
+        else => return null,
+    }
+}
+
+/// Take the address of a volatile-qualified value and return a volatile pointer to that value.
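+/// For example, for `&hw->reg` where `reg` is a volatile field (see
+/// test/cases/translate_c/cv_qualified_value.c), the address is produced either by collapsing a
+/// dereference that translate-c itself inserted (see maybeCollapseAddressOf), by calling
+/// `.ptr()`/`.constPtr()` on a `std.zig.c_translation.Volatile(T)` value, or, failing that, by
+/// taking the address and re-applying the qualifiers with an explicit pointer cast.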
+fn qualifiedAddressOf(
+    c: *Context,
+    scope: *Scope,
+    pointee_expr: *const clang.Expr,
+    expr_ty: clang.QualType,
+    node: Node,
+) TransError!Node {
+    assert(expr_ty.isVolatileQualified());
+    const ptr_node = ptr_node: {
+        if (maybeCollapseAddressOf(c, pointee_expr, expr_ty, node)) |ptr| {
+            break :ptr_node ptr;
+        }
+        var base_ty: clang.QualType = expr_ty;
+        while (base_ty.getTypeClass() == .Elaborated) {
+            const elaborated = @as(*const clang.ElaboratedType, @ptrCast(base_ty.getTypePtr()));
+            base_ty = elaborated.getNamedType();
+        }
+        if (isVolatileTypedef(base_ty)) {
+            const load_field = if (expr_ty.isConstQualified())
+                try Tag.field_access.create(c.arena, .{ .lhs = node, .field_name = "constPtr" })
+            else
+                try Tag.field_access.create(c.arena, .{ .lhs = node, .field_name = "ptr" });
+            return try Tag.call.create(c.arena, .{ .lhs = load_field, .args = &.{} });
+        }
+        // If the type is not std.zig.c_translation.Volatile(T), take its address and explicitly
+        // cast the pointer with the qualifiers of the pointee
+        const address_of_node = try Tag.address_of.create(c.arena, node);
+        break :ptr_node address_of_node;
+    };
+    return try restoreCVQualifiers(c, scope, expr_ty, ptr_node, pointee_expr.getBeginLoc());
+}
+
+/// Ensure a volatile-qualified value is volatile-loaded. Extra bookkeeping is required for such values because
+/// no equivalent concept exists in zig. The general strategy is to correctly apply the qualifiers whenever the
+/// address of a qualified value-type is taken, at which point the type can be correctly represented in zig.
+///
+/// Generally, this is only meaningful when the value itself has been previously obtained via a pointer somehow:
+/// - directly, via a normal volatile pointer (volatile T *ptr)
+/// - indirectly, via the address of an extern volatile-qualified value-type
+/// - indirectly, via a volatile-qualified struct field through a pointer to a (possibly non-volatile) struct
+///   and taking the field's address.
+///
+/// For example, a volatile field reached through a pointer to a non-volatile struct can be volatile-loaded by
+/// first loading the field by value, taking its address, casting the resulting pointer to a volatile pointer,
+/// and dereferencing that, all within the same expression. This works because of zig's field pointer
+/// propagation semantics.
+fn volatileLoad(
+    c: *Context,
+    scope: *Scope,
+    expr: *const clang.Expr,
+    expr_ty: clang.QualType,
+    node: Node,
+) TransError!Node {
+    var base_ty: clang.QualType = expr_ty;
+    while (base_ty.getTypeClass() == .Elaborated) {
+        const elaborated = @as(*const clang.ElaboratedType, @ptrCast(base_ty.getTypePtr()));
+        base_ty = elaborated.getNamedType();
+    }
+    // If the element type is an array, don't apply qualifiers (yet). In C, an array cannot legally be
+    // used by-value, and anything that looks like a by-value use of a whole array isn't *actually*
+    // a load -- we don't want to falsely annotate this as a volatile load which cannot be optimized away.
+    // Presumably, this expression will later be dereferenced or decay to a pointer, at which point
+    // pointer qualifiers will be applied.
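+    // For example, `hw->regm[0]` in test/cases/translate_c/cv_qualified_value.c yields a whole row of
+    // `mmio_int` values; it is passed through untouched here, and the enclosing dereference
+    // `*(hw->regm[0])` later re-applies the volatile qualifier through a pointer cast.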
+ if (base_ty.getTypeClass() == .ConstantArray) { + return node; + } + const ptr = try qualifiedAddressOf(c, scope, expr, expr_ty, node); + return try Tag.deref.create(c.arena, ptr); +} + fn transMemberExpr(c: *Context, scope: *Scope, stmt: *const clang.MemberExpr, result_used: ResultUsed) TransError!Node { var container_node = try transExpr(c, scope, stmt.getBase(), .used); if (stmt.isArrow()) { @@ -3404,6 +3584,17 @@ fn transMemberExpr(c: *Context, scope: *Scope, stmt: *const clang.MemberExpr, re var node = try Tag.field_access.create(c.arena, .{ .lhs = container_node, .field_name = name }); if (exprIsFlexibleArrayRef(c, @as(*const clang.Expr, @ptrCast(stmt)))) { node = try Tag.call.create(c.arena, .{ .lhs = node, .args = &.{} }); + } else if (stmt.isArrow()) { + // TODO(theofabilous): function types, opaque demotion? + // If accessing a field with the `->` operator and the field is volatile, this is equivalent + // to dereferencing a pointer-to-volatile of that field. To maintain the C semantics, take + // the address of the field, apply the volatile qualifiers, and dereference through that + // pointer. + const expr = @as(*const clang.Expr, @ptrCast(stmt)); + const expr_ty = expr.getType(); + if (expr_ty.isVolatileQualified()) { + node = try volatileLoad(c, scope, expr, expr_ty, node); + } } return maybeSuppressResult(c, result_used, node); } @@ -3417,6 +3608,7 @@ fn transMemberExpr(c: *Context, scope: *Scope, stmt: *const clang.MemberExpr, re fn transSignedArrayAccess( c: *Context, scope: *Scope, + stmt: *const clang.ArraySubscriptExpr, container_expr: *const clang.Expr, subscr_expr: *const clang.Expr, result_used: ResultUsed, @@ -3481,7 +3673,13 @@ fn transSignedArrayAccess( try block_scope.statements.append(if_node); const block_node = try block_scope.complete(c); - const derefed = try Tag.deref.create(c.arena, block_node); + var node = block_node; + const expr = @as(*const clang.Expr, @ptrCast(stmt)); + const expr_ty = expr.getType(); + if (expr_ty.isConstQualified() or expr_ty.isVolatileQualified()) { + node = try restoreCVQualifiers(c, scope, expr_ty, node, expr.getBeginLoc()); + } + const derefed = try Tag.deref.create(c.arena, node); return maybeSuppressResult(c, result_used, derefed); } @@ -3511,7 +3709,7 @@ fn transArrayAccess(c: *Context, scope: *Scope, stmt: *const clang.ArraySubscrip // Special case: actual pointer (not decayed array) and signed integer subscript // See discussion at https://github.com/ziglang/zig/pull/8589 if (is_signed and (base_stmt == unwrapped_base) and !is_vector and !is_nonnegative_int_literal) - return transSignedArrayAccess(c, scope, base_stmt, subscr_expr, result_used); + return transSignedArrayAccess(c, scope, stmt, base_stmt, subscr_expr, result_used); const container_node = try transExpr(c, scope, unwrapped_base, .used); const rhs = if (is_longlong or is_signed) blk: { @@ -3526,10 +3724,15 @@ fn transArrayAccess(c: *Context, scope: *Scope, stmt: *const clang.ArraySubscrip }); } else try transExpr(c, scope, subscr_expr, .used); - const node = try Tag.array_access.create(c.arena, .{ + var node = try Tag.array_access.create(c.arena, .{ .lhs = container_node, .rhs = rhs, }); + const expr = @as(*const clang.Expr, @ptrCast(stmt)); + const expr_ty = expr.getType(); + if (expr_ty.isVolatileQualified()) { + node = try volatileLoad(c, scope, expr, expr_ty, node); + } return maybeSuppressResult(c, result_used, node); } @@ -3714,17 +3917,37 @@ fn transUnaryOperator(c: *Context, scope: *Scope, stmt: *const clang.UnaryOperat else return 
transCreatePreCrement(c, scope, stmt, .sub_assign, used), .AddrOf => { - return Tag.address_of.create(c.arena, try transExpr(c, scope, op_expr, used)); + const expr = @as(*const clang.Expr, @ptrCast(stmt)); + var op_ty = op_expr.getType(); + const op_node = try transExpr(c, scope, op_expr, used); + if (op_ty.isVolatileQualified() or op_ty.isConstQualified()) { + const force_const = expr.getType().getTypePtr().getPointeeType().isConstQualified(); + if (force_const) { + op_ty.addConst(); + } + return qualifiedAddressOf(c, scope, op_expr, op_ty, op_node); + } else { + return try Tag.address_of.create(c.arena, op_node); + } }, .Deref => { if (qualTypeWasDemotedToOpaque(c, stmt.getType())) return fail(c, error.UnsupportedTranslation, stmt.getBeginLoc(), "cannot dereference opaque type", .{}); - const node = try transExpr(c, scope, op_expr, used); + var node = try transExpr(c, scope, op_expr, used); var is_ptr = false; const fn_ty = qualTypeGetFnProto(op_expr.getType(), &is_ptr); if (fn_ty != null and is_ptr) return node; + const expr = @as(*const clang.Expr, @ptrCast(stmt)); + var expr_ty = expr.getType(); + if (expr_ty.isVolatileQualified() or expr_ty.isConstQualified()) { + const force_const = op_expr.getType().getTypePtr().getPointeeType().isConstQualified(); + if (force_const) { + expr_ty.addConst(); + } + node = try restoreCVQualifiers(c, scope, expr_ty, node, expr.getBeginLoc()); + } return Tag.deref.create(c.arena, node); }, .Plus => return transExpr(c, scope, op_expr, used), @@ -4255,7 +4478,22 @@ fn transQualTypeInitialized( } fn transQualType(c: *Context, scope: *Scope, qt: clang.QualType, source_loc: clang.SourceLocation) TypeError!Node { - return transType(c, scope, qt.getTypePtr(), source_loc); + const node = try transType(c, scope, qt.getTypePtr(), source_loc); + if (qt.isVolatileQualified()) { + switch (qt.getTypeClass()) { + .Builtin, + .Record, + .Enum, + => return try Tag.helpers_volatile.create(c.arena, node), + .Elaborated, .Typedef => { + if (qt.isLocalVolatileQualified()) { + return try Tag.helpers_volatile.create(c.arena, node); + } + }, + else => {}, + } + } + return node; } /// Produces a Zig AST node by translating a Clang QualType, respecting the width, but modifying the signed-ness. 
@@ -4780,7 +5018,7 @@ fn transType(c: *Context, scope: *Scope, ty: *const clang.Type, source_loc: clan const is_fn_proto = qualTypeChildIsFnProto(child_qt); const is_const = is_fn_proto or child_qt.isConstQualified(); const is_volatile = child_qt.isVolatileQualified(); - const elem_type = try transQualType(c, scope, child_qt, source_loc); + const elem_type = try transType(c, scope, child_qt.getTypePtr(), source_loc); const ptr_info: @FieldType(ast.Payload.Pointer, "data") = .{ .is_const = is_const, .is_volatile = is_volatile, @@ -4803,7 +5041,7 @@ fn transType(c: *Context, scope: *Scope, ty: *const clang.Type, source_loc: clan const_arr_ty.getSize(&size_ap_int); defer size_ap_int.free(); const size = size_ap_int.getLimitedValue(usize); - const elem_type = try transType(c, scope, const_arr_ty.getElementType().getTypePtr(), source_loc); + const elem_type = try transQualType(c, scope, const_arr_ty.getElementType(), source_loc); return Tag.array_type.create(c.arena, .{ .len = size, .elem_type = elem_type }); }, diff --git a/src/zig_clang.cpp b/src/zig_clang.cpp index 92485a6a353c..4db81807f008 100644 --- a/src/zig_clang.cpp +++ b/src/zig_clang.cpp @@ -2789,6 +2789,11 @@ bool ZigClangQualType_isVolatileQualified(ZigClangQualType self) { return qt.isVolatileQualified(); } +bool ZigClangQualType_isLocalVolatileQualified(ZigClangQualType self) { + clang::QualType qt = bitcast(self); + return qt.isLocalVolatileQualified(); +} + bool ZigClangQualType_isRestrictQualified(ZigClangQualType self) { clang::QualType qt = bitcast(self); return qt.isRestrictQualified(); diff --git a/src/zig_clang.h b/src/zig_clang.h index e2b6c3c2f657..080405f2a3a1 100644 --- a/src/zig_clang.h +++ b/src/zig_clang.h @@ -1479,6 +1479,7 @@ ZIG_EXTERN_C void ZigClangQualType_addConst(struct ZigClangQualType *); ZIG_EXTERN_C bool ZigClangQualType_eq(struct ZigClangQualType, struct ZigClangQualType); ZIG_EXTERN_C bool ZigClangQualType_isConstQualified(struct ZigClangQualType); ZIG_EXTERN_C bool ZigClangQualType_isVolatileQualified(struct ZigClangQualType); +ZIG_EXTERN_C bool ZigClangQualType_isLocalVolatileQualified(struct ZigClangQualType); ZIG_EXTERN_C bool ZigClangQualType_isRestrictQualified(struct ZigClangQualType); ZIG_EXTERN_C enum ZigClangTypeClass ZigClangType_getTypeClass(const struct ZigClangType *self); diff --git a/test/cases/translate_c/cv_qualified_value.c b/test/cases/translate_c/cv_qualified_value.c new file mode 100644 index 000000000000..33a0220e9fed --- /dev/null +++ b/test/cases/translate_c/cv_qualified_value.c @@ -0,0 +1,120 @@ +typedef unsigned int uint; + +typedef volatile int mmio_int; +typedef volatile uint mmio_uint; +typedef mmio_int *mmio_int_ptr; + +typedef struct { + mmio_int reg; + mmio_uint regu; + mmio_int regs[4]; + mmio_int regm[2][2]; + volatile int regx; + mmio_int_ptr reg_ptr; +} hw_t; + +extern hw_t *hw; +extern hw_t hw_arr[4]; + +extern const mmio_int reg; +extern mmio_int *regs; + +static int hw_reg(void) { + const hw_t *chw = (const hw_t *)(0xd0000000); + (void) *(&chw->regx); + (void) chw->regx; + (void) *(&hw->reg); + hw->reg = 0; + + (void) hw->regu; + hw->regu = 0; + + return hw->reg; +} + +static typeof(&hw->reg) hw_reg_ptr(void) { + return &hw->reg; +} + +static mmio_int_ptr hw_ptr(void) { + return hw->reg_ptr; +} + +static typeof(®) reg_ptr(void) { + return ® +} + +static int hw_regs_0(void) { + return hw->regs[0u]; +} + +static int hw_0_regs_0(void) { + return hw_arr[0u].regs[0u]; +} + +static int hw_regm_00(void) { + return hw->regm[0u][0u]; +} + +static int 
hw_regm_0_deref(void) { + return *(hw->regm[0u]); +} + +static int ptr_arith(void) { + return *(regs+1u); +} + +// translate-c +// c_frontend=clang +// +// pub const uint = c_uint; +// pub const mmio_int = @import("std").zig.c_translation.Volatile(c_int); +// pub const mmio_uint = @import("std").zig.c_translation.Volatile(uint); +// pub const mmio_int_ptr = [*c]volatile mmio_int; +// pub const hw_t = extern struct { +// reg: mmio_int = @import("std").mem.zeroes(mmio_int), +// regu: mmio_uint = @import("std").mem.zeroes(mmio_uint), +// regs: [4]mmio_int = @import("std").mem.zeroes([4]mmio_int), +// regm: [2][2]mmio_int = @import("std").mem.zeroes([2][2]mmio_int), +// regx: @import("std").zig.c_translation.Volatile(c_int) = @import("std").mem.zeroes(@import("std").zig.c_translation.Volatile(c_int)), +// reg_ptr: mmio_int_ptr = @import("std").mem.zeroes(mmio_int_ptr), +// }; +// pub extern var hw: [*c]hw_t; +// pub extern var hw_arr: [4]hw_t; +// pub extern const reg: mmio_int; +// pub extern var regs: [*c]volatile mmio_int; +// pub fn hw_reg() callconv(.c) c_int { +// var chw: [*c]const hw_t = @as([*c]const hw_t, @ptrFromInt(@as(c_uint, 3489660928))); +// _ = &chw; +// _ = @as([*c]const volatile c_int, @ptrCast(@as([*c]const volatile c_int, @ptrCast(&chw.*.regx)))).*; +// _ = @as([*c]const volatile c_int, @ptrCast(&chw.*.regx)).*; +// _ = @as([*c]volatile mmio_int, @ptrCast(@as([*c]volatile mmio_int, @ptrCast(hw.*.reg.ptr())))).*; +// hw.*.reg.ptr().* = 0; +// _ = hw.*.regu.ptr().*; +// hw.*.regu.ptr().* = 0; +// return hw.*.reg.ptr().*; +// } +// pub fn hw_reg_ptr() callconv(.c) @TypeOf(@as([*c]volatile mmio_int, @ptrCast(hw.*.reg.ptr()))) { +// return @as([*c]volatile mmio_int, @ptrCast(hw.*.reg.ptr())); +// } +// pub fn hw_ptr() callconv(.c) mmio_int_ptr { +// return hw.*.reg_ptr; +// } +// pub fn reg_ptr() callconv(.c) @TypeOf(reg.constPtr()) { +// return reg.constPtr(); +// } +// pub fn hw_regs_0() callconv(.c) c_int { +// return hw.*.regs[@as(c_uint, 0)].ptr().*; +// } +// pub fn hw_0_regs_0() callconv(.c) c_int { +// return hw_arr[@as(c_uint, 0)].regs[@as(c_uint, 0)].ptr().*; +// } +// pub fn hw_regm_00() callconv(.c) c_int { +// return hw.*.regm[@as(c_uint, 0)][@as(c_uint, 0)].ptr().*; +// } +// pub fn hw_regm_0_deref() callconv(.c) c_int { +// return @as([*c]volatile mmio_int, @ptrCast(@as([*c]volatile mmio_int, @ptrCast(@alignCast(&hw.*.regm[@as(c_uint, 0)]))))).*; +// } +// pub fn ptr_arith() callconv(.c) c_int { +// return @as([*c]volatile mmio_int, @ptrCast(regs + @as(c_uint, 1))).*; +// }
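
A minimal usage sketch of the new `std.zig.c_translation.Volatile(T)` wrapper from hand-written Zig code; the local `reg` below is a stand-in for a translated `mmio_int` field such as `hw_t.reg` above:

    const std = @import("std");
    const Volatile = std.zig.c_translation.Volatile;

    test "Volatile usage sketch" {
        // Stand-in for a translated `mmio_int` (i.e. `volatile int`) value; `.{}` zero-initializes `inner`.
        var reg: Volatile(c_int) = .{};
        reg.store(42); // volatile store through `ptr()`
        try std.testing.expectEqual(@as(c_int, 42), reg.load()); // volatile load through `constPtr()`
    }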