// SPDX-FileCopyrightText: Copyright 2024 Arm Limited and/or its affiliates <open-source-office@arm.com>
// SPDX-License-Identifier: MIT OR Apache-2.0

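//! Typed wrappers for physical and virtual addresses, plus a virtual address
//! range and an iterator over it, as used by the translation table code.
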
use core::{fmt, ops::Range};

use crate::TranslationRegime;

use super::TranslationGranule;

/// Physical address
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
pub struct PhysicalAddress(pub(super) usize);

impl PhysicalAddress {
    /// Create a new PhysicalAddress from the raw address value
    ///
    /// # Safety
    /// The caller must ensure that the address is a valid physical address.
    pub const unsafe fn new(address: usize) -> Self {
        Self(address)
    }

    /// Add a byte offset to the address, returning `None` if the addition overflows.
    pub const fn add_offset(self, offset: usize) -> Option<Self> {
        if let Some(address) = self.0.checked_add(offset) {
            Some(Self(address))
        } else {
            None
        }
    }

    /// Convert to the identity-mapped virtual address, i.e. the virtual address with the same
    /// raw value.
    pub const fn identity_va(self) -> VirtualAddress {
        VirtualAddress(self.0)
    }

    /// Distance in bytes from `rhs` to `self`, or `None` if `rhs` is greater than `self`.
    pub const fn diff(self, rhs: Self) -> Option<usize> {
        self.0.checked_sub(rhs.0)
    }
}

impl From<PhysicalAddress> for usize {
    fn from(value: PhysicalAddress) -> Self {
        value.0
    }
}

impl From<PhysicalAddress> for u64 {
    fn from(value: PhysicalAddress) -> Self {
        value.0 as u64
    }
}

impl fmt::Debug for PhysicalAddress {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("PA")
            .field(&format_args!("{:#x}", self.0))
            .finish()
    }
}

/// Virtual address
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
pub struct VirtualAddress(pub(super) usize);

impl VirtualAddress {
    /// Create a new VirtualAddress from the raw address value
    ///
    /// # Safety
    /// The caller must ensure that the address is a valid virtual address.
    pub const unsafe fn new(address: usize) -> Self {
        Self(address)
    }

    /// Add a byte offset to the address, returning `None` if the addition overflows.
    pub const fn add_offset(self, offset: usize) -> Option<Self> {
        if let Some(address) = self.0.checked_add(offset) {
            Some(Self(address))
        } else {
            None
        }
    }

    /// Convert to the identity-mapped physical address, i.e. the physical address with the same
    /// raw value.
    pub const fn identity_pa(self) -> PhysicalAddress {
        PhysicalAddress(self.0)
    }

    /// Mask the address to the offset within a block of the given translation level, i.e. keep
    /// only the bits below the block size at that level.
    pub const fn mask_for_level<const VA_BITS: usize>(
        self,
        translation_granule: TranslationGranule<VA_BITS>,
        level: isize,
    ) -> Self {
        Self(self.0 & (translation_granule.block_size_at_level(level) - 1))
    }

    /// Shift out the address bits that are covered by a block at the given translation level.
    /// Together with [`Self::mask_for_level`], this yields the table entry index at that level.
    pub const fn get_level_index<const VA_BITS: usize>(
        self,
        translation_granule: TranslationGranule<VA_BITS>,
        level: isize,
    ) -> usize {
        self.0 >> translation_granule.total_bits_at_level(level)
    }

    /// Check whether the address belongs to the VA range handled by the given translation
    /// regime: the bits above `VA_BITS` must be all ones for an upper range regime and all
    /// zeros otherwise.
    pub fn is_valid_in_regime<const VA_BITS: usize>(&self, regime: TranslationRegime) -> bool {
        let mask = Self::get_upper_bit_mask::<VA_BITS>();
        let required_upper_bits = if regime.is_upper_va_range() { mask } else { 0 };

        (self.0 & mask) == required_upper_bits
    }

    /// Set the bits above `VA_BITS` as required by the given translation regime: all ones for
    /// an upper range regime, all zeros otherwise.
    pub fn set_upper_bits<const VA_BITS: usize>(self, regime: TranslationRegime) -> Self {
        let mask = Self::get_upper_bit_mask::<VA_BITS>();

        Self(if regime.is_upper_va_range() {
            self.0 | mask
        } else {
            self.0 & !mask
        })
    }

    /// Clear the bits above `VA_BITS`.
    pub fn remove_upper_bits<const VA_BITS: usize>(self) -> Self {
        Self(self.0 & !Self::get_upper_bit_mask::<VA_BITS>())
    }

    /// Keep only the address bits selected by `mask`.
    pub const fn mask_bits(self, mask: usize) -> Self {
        Self(self.0 & mask)
    }

    /// Distance in bytes from `rhs` to `self`, or `None` if `rhs` is greater than `self`.
    pub const fn diff(self, rhs: Self) -> Option<usize> {
        self.0.checked_sub(rhs.0)
    }

    /// Round the address up to the next multiple of `alignment`.
    pub const fn align_up(self, alignment: usize) -> Self {
        Self(self.0.next_multiple_of(alignment))
    }

    /// Mask that selects the address bits at and above `VA_BITS`.
    const fn get_upper_bit_mask<const VA_BITS: usize>() -> usize {
        !((1 << VA_BITS) - 1)
    }
}

impl From<VirtualAddress> for usize {
    fn from(value: VirtualAddress) -> Self {
        value.0
    }
}

impl From<VirtualAddress> for u64 {
    fn from(value: VirtualAddress) -> Self {
        value.0 as u64
    }
}

impl fmt::Debug for VirtualAddress {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("VA")
            .field(&format_args!("{:#x}", self.0))
            .finish()
    }
}

/// Virtual address range
#[derive(Debug)]
pub struct VirtualAddressRange {
    pub(super) start: VirtualAddress,
    pub(super) end: VirtualAddress,
}

impl VirtualAddressRange {
    /// Create a new range from `start` (inclusive) to `end` (exclusive).
    pub fn new(start: VirtualAddress, end: VirtualAddress) -> Self {
        Self { start, end }
    }

    /// Create a new VirtualAddressRange from the raw address values
    ///
    /// # Safety
    /// The caller must ensure that both range bounds are valid virtual addresses.
    pub unsafe fn from_range(value: Range<usize>) -> Self {
        Self::new(
            VirtualAddress::new(value.start),
            VirtualAddress::new(value.end),
        )
    }

    /// Length of the range in bytes, or `None` if `end` is below `start`.
    pub fn len(&self) -> Option<usize> {
        self.end.diff(self.start)
    }

    /// Iterate over the range in increments of `step` bytes, starting at `start`.
    pub fn step_by(self, step: usize) -> VirtualAddressIterator {
        VirtualAddressIterator {
            next: self.start,
            end: self.end,
            step,
        }
    }
}

/// Iterator that steps through a virtual address range in fixed-size increments.
pub struct VirtualAddressIterator {
    next: VirtualAddress,
    end: VirtualAddress,
    step: usize,
}

impl Iterator for VirtualAddressIterator {
    type Item = VirtualAddress;

    fn next(&mut self) -> Option<Self::Item> {
        if self.next < self.end {
            let current = self.next;

            // Advance by `step`; if that would overflow, saturate at the end of the range so
            // that iteration stops on the next call.
            self.next = if let Some(next) = self.next.add_offset(self.step) {
                next
            } else {
                self.end
            };

            Some(current)
        } else {
            None
        }
    }
}
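
// The tests below are a minimal usage sketch for the address types. The raw addresses, the
// 48-bit VA size and the 0x1000 step are arbitrary example values (and the literals assume a
// 64-bit `usize`), not requirements of any particular platform.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn physical_address_arithmetic() {
        // SAFETY: the addresses are only used as example values in host-side tests.
        let pa = unsafe { PhysicalAddress::new(0x8000_0000) };
        let pa_end = unsafe { PhysicalAddress::new(0x8000_1000) };

        assert_eq!(pa.add_offset(0x1000), Some(pa_end));
        // Overflowing additions are reported as `None` instead of wrapping.
        assert_eq!(pa.add_offset(usize::MAX), None);
        assert_eq!(pa_end.diff(pa), Some(0x1000));
        assert_eq!(pa.diff(pa_end), None);
        assert_eq!(usize::from(pa), 0x8000_0000);
        assert_eq!(pa.identity_va(), unsafe { VirtualAddress::new(0x8000_0000) });
    }

    #[test]
    fn virtual_address_masking_and_alignment() {
        // SAFETY: example address for host-side tests only.
        let va = unsafe { VirtualAddress::new(0xffff_0000_0000_1234) };

        // Clearing the bits above VA_BITS = 48 leaves the offset into the VA range.
        assert_eq!(va.remove_upper_bits::<48>(), unsafe {
            VirtualAddress::new(0x1234)
        });
        assert_eq!(va.mask_bits(0xfff), unsafe { VirtualAddress::new(0x234) });
        assert_eq!(
            unsafe { VirtualAddress::new(0x1234) }.align_up(0x1000),
            unsafe { VirtualAddress::new(0x2000) }
        );
    }

    #[test]
    fn virtual_address_range_iteration() {
        // SAFETY: the range bounds are only used as example values in host-side tests.
        let range = unsafe { VirtualAddressRange::from_range(0x4000_0000..0x4000_3000) };
        assert_eq!(range.len(), Some(0x3000));

        // Stepping by the page size visits every page start in [start, end).
        let mut pages = range.step_by(0x1000);
        assert_eq!(pages.next(), Some(unsafe { VirtualAddress::new(0x4000_0000) }));
        assert_eq!(pages.next(), Some(unsafe { VirtualAddress::new(0x4000_1000) }));
        assert_eq!(pages.next(), Some(unsafe { VirtualAddress::new(0x4000_2000) }));
        assert_eq!(pages.next(), None);
    }
}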