//! Module containing abstracts for dealing with contiguous regions of guest memory

use crate::bitmap::{Bitmap, BS};
-use crate::guest_memory::Error;
use crate::guest_memory::Result;
use crate::{
-    Address, Bytes, FileOffset, GuestAddress, GuestMemory, GuestUsize, MemoryRegionAddress,
-    VolatileSlice,
+    Address, AtomicAccess, Bytes, FileOffset, GuestAddress, GuestMemory, GuestMemoryError,
+    GuestUsize, MemoryRegionAddress, ReadVolatile, VolatileSlice, WriteVolatile,
};
+use std::sync::atomic::Ordering;
use std::sync::Arc;

/// Represents a continuous region of guest physical memory.
#[allow(clippy::len_without_is_empty)]
-pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
+pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = GuestMemoryError> {
    /// Type used for dirty memory tracking.
    type B: Bitmap;

@@ -73,7 +73,7 @@ pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
    /// Rust memory safety model. It's the caller's responsibility to ensure that there's no
    /// concurrent accesses to the underlying guest memory.
    fn get_host_address(&self, _addr: MemoryRegionAddress) -> Result<*mut u8> {
-        Err(Error::HostAddressNotAvailable)
+        Err(GuestMemoryError::HostAddressNotAvailable)
    }

    /// Returns information regarding the file and offset backing this memory region.
@@ -89,7 +89,7 @@ pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
        offset: MemoryRegionAddress,
        count: usize,
    ) -> Result<VolatileSlice<BS<Self::B>>> {
-        Err(Error::HostAddressNotAvailable)
+        Err(GuestMemoryError::HostAddressNotAvailable)
    }

    /// Gets a slice of memory for the entire region that supports volatile access.
@@ -299,3 +299,153 @@ impl<R: GuestMemoryRegion> GuestMemory for GuestRegionCollection<R> {
        self.regions.iter().map(AsRef::as_ref)
    }
}
+
+/// A marker trait that, if implemented on a type `R`, makes available a default
+/// implementation of `Bytes<MemoryRegionAddress>` for `R`, based on the assumption
+/// that the entire `GuestMemoryRegion` is just traditional memory without any
+/// special access requirements.
+pub trait GuestMemoryRegionBytes: GuestMemoryRegion {}
+
+impl<R: GuestMemoryRegionBytes> Bytes<MemoryRegionAddress> for R {
+    type E = GuestMemoryError;
+
+    /// # Examples
+    /// * Write a slice at guest address 0x1200.
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// #
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #     .expect("Could not create guest memory");
+    /// #
+    /// let res = gm
+    ///     .write(&[1, 2, 3, 4, 5], GuestAddress(0x1200))
+    ///     .expect("Could not write to guest memory");
+    /// assert_eq!(5, res);
+    /// # }
+    /// ```
+    fn write(&self, buf: &[u8], addr: MemoryRegionAddress) -> Result<usize> {
+        let maddr = addr.raw_value() as usize;
+        self.as_volatile_slice()?
+            .write(buf, maddr)
+            .map_err(Into::into)
+    }
+
+    /// # Examples
+    /// * Read a slice of length 16 at guest address 0x1200.
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// #
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #     .expect("Could not create guest memory");
+    /// #
+    /// let buf = &mut [0u8; 16];
+    /// let res = gm
+    ///     .read(buf, GuestAddress(0x1200))
+    ///     .expect("Could not read from guest memory");
+    /// assert_eq!(16, res);
+    /// # }
+    /// ```
+    fn read(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> Result<usize> {
+        let maddr = addr.raw_value() as usize;
+        self.as_volatile_slice()?
+            .read(buf, maddr)
+            .map_err(Into::into)
+    }
+
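+    /// # Examples
+    /// * Write a slice at guest address 0x1200, requiring that every byte is written
+    ///   (a sketch mirroring the `write` example above, using the same hypothetical
+    ///   0x400-byte mmap-backed region).
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// #
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #     .expect("Could not create guest memory");
+    /// #
+    /// gm.write_slice(&[1, 2, 3, 4, 5], GuestAddress(0x1200))
+    ///     .expect("Could not write slice to guest memory");
+    /// # }
+    /// ```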
+    fn write_slice(&self, buf: &[u8], addr: MemoryRegionAddress) -> Result<()> {
+        let maddr = addr.raw_value() as usize;
+        self.as_volatile_slice()?
+            .write_slice(buf, maddr)
+            .map_err(Into::into)
+    }
+
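+    /// # Examples
+    /// * Read a slice of length 16 at guest address 0x1200, requiring that the whole
+    ///   buffer is filled (a sketch mirroring the `read` example above, using the same
+    ///   hypothetical 0x400-byte mmap-backed region).
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// #
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #     .expect("Could not create guest memory");
+    /// #
+    /// let buf = &mut [0u8; 16];
+    /// gm.read_slice(buf, GuestAddress(0x1200))
+    ///     .expect("Could not read slice from guest memory");
+    /// # }
+    /// ```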
+    fn read_slice(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> Result<()> {
+        let maddr = addr.raw_value() as usize;
+        self.as_volatile_slice()?
+            .read_slice(buf, maddr)
+            .map_err(Into::into)
+    }
+
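+    /// # Examples
+    /// * Read bytes from a file into guest memory (a sketch assuming a Unix host with the
+    ///   `backend-mmap` feature, where `File` implements `ReadVolatile`; `/dev/urandom` is
+    ///   just a convenient byte source).
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// # use std::fs::File;
+    /// #
+    /// # #[cfg(all(unix, feature = "backend-mmap"))]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #     .expect("Could not create guest memory");
+    /// #
+    /// let mut file = File::open("/dev/urandom").expect("Could not open /dev/urandom");
+    /// let res = gm
+    ///     .read_volatile_from(GuestAddress(0x1200), &mut file, 16)
+    ///     .expect("Could not read from file into guest memory");
+    /// assert!(res <= 16);
+    /// # }
+    /// ```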
+    fn read_volatile_from<F>(
+        &self,
+        addr: MemoryRegionAddress,
+        src: &mut F,
+        count: usize,
+    ) -> Result<usize>
+    where
+        F: ReadVolatile,
+    {
+        self.as_volatile_slice()?
+            .read_volatile_from(addr.0 as usize, src, count)
+            .map_err(Into::into)
+    }
+
+    fn read_exact_volatile_from<F>(
+        &self,
+        addr: MemoryRegionAddress,
+        src: &mut F,
+        count: usize,
+    ) -> Result<()>
+    where
+        F: ReadVolatile,
+    {
+        self.as_volatile_slice()?
+            .read_exact_volatile_from(addr.0 as usize, src, count)
+            .map_err(Into::into)
+    }
+
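+    /// # Examples
+    /// * Write bytes from guest memory into a file (a sketch assuming a Unix host with the
+    ///   `backend-mmap` feature, where `File` implements `WriteVolatile`; `/dev/null` is
+    ///   just a convenient sink).
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// # use std::fs::OpenOptions;
+    /// #
+    /// # #[cfg(all(unix, feature = "backend-mmap"))]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #     .expect("Could not create guest memory");
+    /// #
+    /// let mut file = OpenOptions::new()
+    ///     .write(true)
+    ///     .open("/dev/null")
+    ///     .expect("Could not open /dev/null");
+    /// let res = gm
+    ///     .write_volatile_to(GuestAddress(0x1200), &mut file, 16)
+    ///     .expect("Could not write from guest memory to file");
+    /// assert!(res <= 16);
+    /// # }
+    /// ```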
+    fn write_volatile_to<F>(
+        &self,
+        addr: MemoryRegionAddress,
+        dst: &mut F,
+        count: usize,
+    ) -> Result<usize>
+    where
+        F: WriteVolatile,
+    {
+        self.as_volatile_slice()?
+            .write_volatile_to(addr.0 as usize, dst, count)
+            .map_err(Into::into)
+    }
+
+    fn write_all_volatile_to<F>(
+        &self,
+        addr: MemoryRegionAddress,
+        dst: &mut F,
+        count: usize,
+    ) -> Result<()>
+    where
+        F: WriteVolatile,
+    {
+        self.as_volatile_slice()?
+            .write_all_volatile_to(addr.0 as usize, dst, count)
+            .map_err(Into::into)
+    }
+
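+    /// # Examples
+    /// * Store a `u64` at guest address 0x1100 with relaxed ordering (a sketch mirroring
+    ///   the `write` example above, using the same hypothetical 0x400-byte mmap-backed
+    ///   region).
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// # use std::sync::atomic::Ordering;
+    /// #
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #     .expect("Could not create guest memory");
+    /// #
+    /// gm.store(225u64, GuestAddress(0x1100), Ordering::Relaxed)
+    ///     .expect("Could not store u64 to guest memory");
+    /// # }
+    /// ```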
+    fn store<T: AtomicAccess>(
+        &self,
+        val: T,
+        addr: MemoryRegionAddress,
+        order: Ordering,
+    ) -> Result<()> {
+        self.as_volatile_slice().and_then(|s| {
+            s.store(val, addr.raw_value() as usize, order)
+                .map_err(Into::into)
+        })
+    }
+
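+    /// # Examples
+    /// * Load a `u64` from guest address 0x1100 with relaxed ordering (a sketch mirroring
+    ///   the `read` example above; the freshly created anonymous mapping is zero-filled,
+    ///   so the loaded value is 0).
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// # use std::sync::atomic::Ordering;
+    /// #
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #     .expect("Could not create guest memory");
+    /// #
+    /// let val: u64 = gm
+    ///     .load(GuestAddress(0x1100), Ordering::Relaxed)
+    ///     .expect("Could not load u64 from guest memory");
+    /// assert_eq!(val, 0);
+    /// # }
+    /// ```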
+    fn load<T: AtomicAccess>(&self, addr: MemoryRegionAddress, order: Ordering) -> Result<T> {
+        self.as_volatile_slice()
+            .and_then(|s| s.load(addr.raw_value() as usize, order).map_err(Into::into))
+    }
+}