// Copyright 2024 Red Hat, Inc.
// Author(s): Paolo Bonzini
// SPDX-License-Identifier: GPL-2.0-or-later

//! Bindings for `MemoryRegion`, `MemoryRegionOps` and `MemTxAttrs`

use std::{
    ffi::{CStr, CString},
    marker::{PhantomData, PhantomPinned},
    os::raw::{c_uint, c_void},
    ptr::addr_of,
};

pub use bindings::{hwaddr, MemTxAttrs};

use crate::{
    bindings::{self, device_endian, memory_region_init_io},
    callbacks::FnCall,
    prelude::*,
    zeroable::Zeroable,
};

pub struct MemoryRegionOps<T>(
    bindings::MemoryRegionOps,
    // Note: quite often you'll see PhantomData<fn(&T)> mentioned when discussing
    // covariance and contravariance; you don't need any of those to understand
    // this usage of PhantomData.  Quite simply, MemoryRegionOps<T> *logically*
    // holds callbacks that take an argument of type &T, except the type is erased
    // before the callback is stored in the bindings::MemoryRegionOps field.
    // The argument of PhantomData is a function pointer in order to represent
    // that relationship; while that will also provide desirable and safe variance
    // for T, variance is not the point but just a consequence.
    PhantomData<fn(&T)>,
);

// SAFETY: When a *const T is passed to the callbacks, the call itself
// is done in a thread-safe manner.  The invocation is okay as long as
// T itself is `Sync`.
unsafe impl<T: Sync> Sync for MemoryRegionOps<T> {}

#[derive(Clone)]
pub struct MemoryRegionOpsBuilder<T>(bindings::MemoryRegionOps, PhantomData<fn(&T)>);

unsafe extern "C" fn memory_region_ops_read_cb<T, F: for<'a> FnCall<(&'a T, hwaddr, u32), u64>>(
    opaque: *mut c_void,
    addr: hwaddr,
    size: c_uint,
) -> u64 {
    F::call((unsafe { &*(opaque.cast::<T>()) }, addr, size))
}

unsafe extern "C" fn memory_region_ops_write_cb<T, F: for<'a> FnCall<(&'a T, hwaddr, u64, u32)>>(
    opaque: *mut c_void,
    addr: hwaddr,
    data: u64,
    size: c_uint,
) {
    F::call((unsafe { &*(opaque.cast::<T>()) }, addr, data, size))
}

impl<T> MemoryRegionOpsBuilder<T> {
    #[must_use]
    pub const fn read<F: for<'a> FnCall<(&'a T, hwaddr, u32), u64>>(mut self, _f: &F) -> Self {
        self.0.read = Some(memory_region_ops_read_cb::<T, F>);
        self
    }

    #[must_use]
    pub const fn write<F: for<'a> FnCall<(&'a T, hwaddr, u64, u32)>>(mut self, _f: &F) -> Self {
        self.0.write = Some(memory_region_ops_write_cb::<T, F>);
        self
    }

    #[must_use]
    pub const fn big_endian(mut self) -> Self {
        self.0.endianness = device_endian::DEVICE_BIG_ENDIAN;
        self
    }

    #[must_use]
    pub const fn little_endian(mut self) -> Self {
        self.0.endianness = device_endian::DEVICE_LITTLE_ENDIAN;
        self
    }

    #[must_use]
    pub const fn native_endian(mut self) -> Self {
        self.0.endianness = device_endian::DEVICE_NATIVE_ENDIAN;
        self
    }

    #[must_use]
    pub const fn valid_sizes(mut self, min: u32, max: u32) -> Self {
        self.0.valid.min_access_size = min;
        self.0.valid.max_access_size = max;
        self
    }

    #[must_use]
    pub const fn valid_unaligned(mut self) -> Self {
        self.0.valid.unaligned = true;
        self
    }

    #[must_use]
    pub const fn impl_sizes(mut self, min: u32, max: u32) -> Self {
        self.0.impl_.min_access_size = min;
        self.0.impl_.max_access_size = max;
        self
    }

    #[must_use]
    pub const fn impl_unaligned(mut self) -> Self {
        self.0.impl_.unaligned = true;
        self
    }

    #[must_use]
    pub const fn build(self) -> MemoryRegionOps<T> {
        MemoryRegionOps::<T>(self.0, PhantomData)
    }

    #[must_use]
    pub const fn new() -> Self {
        Self(bindings::MemoryRegionOps::ZERO, PhantomData)
    }
}

impl<T> Default for MemoryRegionOpsBuilder<T> {
    fn default() -> Self {
        Self::new()
    }
}
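// Illustrative sketch only (not part of the original module): because the
// builder methods are `const`, a device can describe its MMIO callbacks once
// in a `static`.  `DummyState` and its `read`/`write` methods below are
// hypothetical placeholders, assumed to have signatures compatible with the
// `FnCall` bounds above, e.g. `fn read(&self, addr: hwaddr, size: u32) -> u64`
// and `fn write(&self, addr: hwaddr, data: u64, size: u32)`:
//
//     static DUMMY_OPS: MemoryRegionOps<DummyState> =
//         MemoryRegionOpsBuilder::<DummyState>::new()
//             .read(&DummyState::read)
//             .write(&DummyState::write)
//             .native_endian()
//             .valid_sizes(4, 4)
//             .build();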
/// A safe wrapper around [`bindings::MemoryRegion`].  Compared to the
/// underlying C struct it is marked as pinned because the QOM tree
/// contains a pointer to it.
pub struct MemoryRegion {
    inner: bindings::MemoryRegion,
    _pin: PhantomPinned,
}

impl MemoryRegion {
    // inline to ensure that it is not included in tests, which only
    // link to hwcore and qom.  FIXME: inlining is actually the opposite
    // of what we want, since this is the type-erased version of the
    // init_io function below.  Look into splitting the qemu_api crate.
    #[inline(always)]
    unsafe fn do_init_io(
        slot: *mut bindings::MemoryRegion,
        owner: *mut Object,
        ops: &'static bindings::MemoryRegionOps,
        name: &'static str,
        size: u64,
    ) {
        unsafe {
            let cstr = CString::new(name).unwrap();
            memory_region_init_io(
                slot,
                owner.cast::<bindings::Object>(),
                ops,
                owner.cast::<c_void>(),
                cstr.as_ptr(),
                size,
            );
        }
    }

    pub fn init_io<T: IsA<Object>>(
        &mut self,
        owner: *mut T,
        ops: &'static MemoryRegionOps<T>,
        name: &'static str,
        size: u64,
    ) {
        unsafe {
            Self::do_init_io(&mut self.inner, owner.cast::<Object>(), &ops.0, name, size);
        }
    }

    pub(crate) const fn as_mut_ptr(&self) -> *mut bindings::MemoryRegion {
        addr_of!(self.inner) as *mut _
    }
}

unsafe impl ObjectType for MemoryRegion {
    type Class = bindings::MemoryRegionClass;
    const TYPE_NAME: &'static CStr =
        unsafe { CStr::from_bytes_with_nul_unchecked(bindings::TYPE_MEMORY_REGION) };
}
qom_isa!(MemoryRegion: Object);

/// A special `MemTxAttrs` constant, used to indicate that no memory
/// attributes are specified.
///
/// Bus masters which don't specify any attributes will get this,
/// which has all attribute bits clear except the topmost one
/// (so that we can distinguish "all attributes deliberately clear"
/// from "didn't specify" if necessary).
pub const MEMTXATTRS_UNSPECIFIED: MemTxAttrs = MemTxAttrs {
    unspecified: true,
    ..Zeroable::ZERO
};
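// Illustrative sketch only (not part of the original module): continuing the
// hypothetical `DummyState` example above, a device whose state contains an
// `iomem: MemoryRegion` field could register a 4 KiB MMIO window from its QOM
// instance-init code roughly as follows, using `core::ptr::addr_of_mut!` to
// avoid creating overlapping mutable references:
//
//     MemoryRegion::init_io(
//         unsafe { &mut *addr_of_mut!(self.iomem) },
//         addr_of_mut!(*self),
//         &DUMMY_OPS,
//         "dummy-mmio",
//         0x1000,
//     );
//
// The `name` becomes the region's name in the memory hierarchy and `size` is
// the length of the window in bytes; guest reads and writes to the region are
// then dispatched to the callbacks stored in `DUMMY_OPS`.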