// SPDX-FileCopyrightText: Copyright 2024 Arm Limited and/or its affiliates <open-source-office@arm.com>
// SPDX-License-Identifier: MIT OR Apache-2.0
| |
| use core::ops::Range; |
| |
| use super::TranslationGranule; |
| |
/// A physical memory address.
///
/// Newtype wrapper around a raw `usize`; performs no validation itself.
/// Constructed via the unsafe [`PhysicalAddress::new`] or by
/// [`VirtualAddress::identity_pa`].
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
pub struct PhysicalAddress(pub(super) usize);
| |
| impl PhysicalAddress { |
| pub const unsafe fn new(address: usize) -> Self { |
| Self(address) |
| } |
| |
| pub const fn add_offset(self, offset: usize) -> Option<Self> { |
| if let Some(address) = self.0.checked_add(offset) { |
| Some(Self(address)) |
| } else { |
| None |
| } |
| } |
| |
| pub const fn identity_va(self) -> VirtualAddress { |
| VirtualAddress(self.0) |
| } |
| |
| pub const fn diff(self, rhs: Self) -> Option<usize> { |
| self.0.checked_sub(rhs.0) |
| } |
| } |
| |
| impl From<PhysicalAddress> for usize { |
| fn from(value: PhysicalAddress) -> Self { |
| value.0 |
| } |
| } |
| |
| impl From<PhysicalAddress> for u64 { |
| fn from(value: PhysicalAddress) -> Self { |
| value.0 as u64 |
| } |
| } |
| |
/// A virtual memory address.
///
/// Newtype wrapper around a raw `usize`; performs no validation itself.
/// Constructed via the unsafe [`VirtualAddress::new`] or by
/// [`PhysicalAddress::identity_va`].
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
pub struct VirtualAddress(pub(super) usize);
| |
| impl VirtualAddress { |
| pub const unsafe fn new(address: usize) -> Self { |
| Self(address) |
| } |
| |
| pub const fn add_offset(self, offset: usize) -> Option<Self> { |
| if let Some(address) = self.0.checked_add(offset) { |
| Some(Self(address)) |
| } else { |
| None |
| } |
| } |
| |
| pub const fn identity_pa(self) -> PhysicalAddress { |
| PhysicalAddress(self.0) |
| } |
| |
| pub const fn mask_for_level<const VA_BITS: usize>( |
| self, |
| translation_granule: TranslationGranule<VA_BITS>, |
| level: isize, |
| ) -> Self { |
| Self(self.0 & (translation_granule.block_size_at_level(level) - 1)) |
| } |
| |
| pub const fn get_level_index<const VA_BITS: usize>( |
| self, |
| translation_granule: TranslationGranule<VA_BITS>, |
| level: isize, |
| ) -> usize { |
| self.0 >> translation_granule.total_bits_at_level(level) |
| } |
| |
| pub const fn mask_bits(self, mask: usize) -> Self { |
| Self(self.0 & mask) |
| } |
| |
| pub const fn diff(self, rhs: Self) -> Option<usize> { |
| self.0.checked_sub(rhs.0) |
| } |
| |
| pub const fn align_up(self, alignment: usize) -> Self { |
| Self(self.0.next_multiple_of(alignment)) |
| } |
| } |
| |
| impl From<VirtualAddress> for usize { |
| fn from(value: VirtualAddress) -> Self { |
| value.0 |
| } |
| } |
| |
| impl From<VirtualAddress> for u64 { |
| fn from(value: VirtualAddress) -> Self { |
| value.0 as u64 |
| } |
| } |
| |
/// A half-open range of virtual addresses: `[start, end)`.
pub struct VirtualAddressRange {
    // Inclusive lower bound of the range.
    pub(super) start: VirtualAddress,
    // Exclusive upper bound of the range.
    pub(super) end: VirtualAddress,
}
| |
| impl VirtualAddressRange { |
| pub fn new(start: VirtualAddress, end: VirtualAddress) -> Self { |
| Self { start, end } |
| } |
| |
| pub unsafe fn from_range(value: Range<usize>) -> Self { |
| Self::new( |
| VirtualAddress::new(value.start), |
| VirtualAddress::new(value.end), |
| ) |
| } |
| |
| pub fn len(&self) -> Option<usize> { |
| self.end.diff(self.start) |
| } |
| |
| pub fn step_by(self, step: usize) -> VirtualAddressIterator { |
| VirtualAddressIterator { |
| next: self.start, |
| end: self.end, |
| step, |
| } |
| } |
| } |
| |
/// Iterator over a [`VirtualAddressRange`], advancing by a fixed byte step.
pub struct VirtualAddressIterator {
    // Next address to yield; set to `end` once the step overflows.
    next: VirtualAddress,
    // Exclusive upper bound; iteration stops when `next` reaches it.
    end: VirtualAddress,
    // Byte distance between consecutive yielded addresses.
    step: usize,
}
| |
| impl Iterator for VirtualAddressIterator { |
| type Item = VirtualAddress; |
| |
| fn next(&mut self) -> Option<Self::Item> { |
| if self.next < self.end { |
| let current = self.next; |
| |
| self.next = if let Some(next) = self.next.add_offset(self.step) { |
| next |
| } else { |
| self.end |
| }; |
| |
| Some(current) |
| } else { |
| None |
| } |
| } |
| } |