// SPDX-FileCopyrightText: Copyright 2024 Arm Limited and/or its affiliates <open-source-office@arm.com>
// SPDX-License-Identifier: MIT OR Apache-2.0

use core::fmt;

use super::{
    address::{PhysicalAddress, VirtualAddress},
    TranslationGranule, XlatError,
};

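/// Memory block descriptor: a contiguous physical address range mapped to a virtual address
/// range of the same size.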
#[derive(PartialEq)]
pub struct Block {
    /// Start of the physical address range covered by the block.
    pub pa: PhysicalAddress,
    /// Start of the virtual address range covered by the block.
    pub va: VirtualAddress,
    /// Size of the block in bytes.
    pub size: usize,
}

impl Block {
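    /// Creates a new block from its physical address, virtual address and size in bytes.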
    pub fn new(pa: PhysicalAddress, va: VirtualAddress, size: usize) -> Self {
        Self { pa, va, size }
    }
}

impl fmt::Debug for Block {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Block")
            .field("pa", &format_args!("{:#010x}", self.pa.0))
            .field("va", &format_args!("{:#010x}", self.va.0))
            .field("size", &format_args!("{:#010x}", self.size))
            .finish()
    }
}

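/// Iterator that splits a physical/virtual address range into mapping blocks, always yielding
/// the largest block size that the current address alignment and the remaining length allow.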
pub struct BlockIterator<const VA_BITS: usize> {
    /// Physical address of the next block to be emitted.
    pa: PhysicalAddress,
    /// Virtual address of the next block to be emitted.
    va: VirtualAddress,
    /// Remaining length of the range in bytes.
    length: usize,
    /// Translation granule which determines the available block sizes.
    granule: TranslationGranule<VA_BITS>,
}

impl<const VA_BITS: usize> BlockIterator<VA_BITS> {
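    /// Creates a block iterator for the given physical/virtual address range.
    ///
    /// Returns an error if the length is zero or if the addresses and the length are not
    /// aligned to the smallest block size of the granule.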
    pub fn new(
        pa: PhysicalAddress,
        va: VirtualAddress,
        length: usize,
        granule: TranslationGranule<VA_BITS>,
    ) -> Result<Self, XlatError> {
        let min_granule_mask = granule.block_size_at_level(3) - 1;

        if length == 0 {
            return Err(XlatError::InvalidParameterError("Length cannot be 0"));
        }

        if (pa.0 | va.0 | length) & min_granule_mask != 0 {
            return Err(XlatError::AlignmentError(pa, va, length, min_granule_mask));
        }

        Ok(Self {
            pa,
            va,
            length,
            granule,
        })
    }
}

impl<const VA_BITS: usize> Iterator for BlockIterator<VA_BITS> {
    type Item = Block;

    fn next(&mut self) -> Option<Self::Item> {
        if self.length > 0 {
            let initial_lookup_level = self.granule.initial_lookup_level();

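            // Walk the lookup levels from the initial one (largest blocks) down to level 3
            // (pages) and take the first block size that both addresses are aligned to and
            // that fits into the remaining length.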
            for block_size in
                (initial_lookup_level..=3).map(|level| self.granule.block_size_at_level(level))
            {
                if (self.pa.0 | self.va.0) & (block_size - 1) == 0 && self.length >= block_size {
                    let block = Block::new(self.pa, self.va, block_size);

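                    // Advance the cursor past the block that has just been emitted.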
                    self.pa = self.pa.add_offset(block_size).unwrap();
                    self.va = self.va.add_offset(block_size).unwrap();
                    self.length -= block_size;

                    return Some(block);
                }
            }
        }

        None
    }
}

#[cfg(test)]
mod tests {
    use super::*;

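    // Asserts that a block matches the expected physical address, virtual address and size.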
    macro_rules! test_block {
        ( $pa:expr, $va:expr, $size:literal, $blocks:expr ) => {
            assert_eq!(
                Block::new(PhysicalAddress($pa), VirtualAddress($va), $size),
                $blocks
            );
        };
    }

    #[test]
    fn test_block_iterator() {
        let mut blocks = BlockIterator::new(
            PhysicalAddress(0x3fff_c000),
            VirtualAddress(0x3fff_c000),
            0x4020_5000,
            TranslationGranule::<36>::Granule4k,
        )
        .unwrap();
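        // The range starts and ends with 4 KiB pages; in between, where the alignment and the
        // remaining length allow it, a whole 1 GiB block and a 2 MiB block are emitted.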
        test_block!(0x3fff_c000, 0x3fff_c000, 0x1000, blocks.next().unwrap());
        test_block!(0x3fff_d000, 0x3fff_d000, 0x1000, blocks.next().unwrap());
        test_block!(0x3fff_e000, 0x3fff_e000, 0x1000, blocks.next().unwrap());
        test_block!(0x3fff_f000, 0x3fff_f000, 0x1000, blocks.next().unwrap());
        test_block!(
            0x4000_0000,
            0x4000_0000,
            0x4000_0000,
            blocks.next().unwrap()
        );
        test_block!(
            0x8000_0000,
            0x8000_0000,
            0x0020_0000,
            blocks.next().unwrap()
        );
        test_block!(0x8020_0000, 0x8020_0000, 0x1000, blocks.next().unwrap());
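        // The blocks above cover the whole 0x4020_5000 byte range, so the iterator must be
        // exhausted at this point.
        assert!(blocks.next().is_none());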
    }

    #[test]
    fn test_block_iterator_unaligned() {
        let blocks = BlockIterator::new(
            PhysicalAddress(0x3fff_c000),
            VirtualAddress(0x3f20_0000),
            0x200000,
            TranslationGranule::<36>::Granule4k,
        )
        .unwrap();
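        // The physical and virtual addresses are only mutually aligned to 4 KiB, so the 2 MiB
        // range has to be split into 512 individual pages.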
        for (i, block) in blocks.enumerate().take(512) {
            test_block!(
                0x3fff_c000 + (i << 12),
                0x3f20_0000 + (i << 12),
                0x1000,
                block
            );
        }
    }
}