blob: f1b5220b50ba593b79ccb83703b189007ea09dba [file] [log] [blame]
// SPDX-FileCopyrightText: Copyright 2024 Arm Limited and/or its affiliates <open-source-office@arm.com>
// SPDX-License-Identifier: MIT OR Apache-2.0
3
use core::ops::Range;

use super::TranslationGranule;
Imre Kisd5b96fd2024-09-11 17:04:32 +02007
/// A physical memory address.
///
/// Newtype wrapper over `usize`. Construction goes through the `unsafe`
/// [`PhysicalAddress::new`] so arbitrary integers are not silently treated as
/// valid physical addresses. The inner value is `pub(super)` so sibling
/// modules of the translation code can access the raw address.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
pub struct PhysicalAddress(pub(super) usize);
10
11impl PhysicalAddress {
12 pub const unsafe fn new(address: usize) -> Self {
13 Self(address)
14 }
15
16 pub const fn add_offset(self, offset: usize) -> Option<Self> {
17 if let Some(address) = self.0.checked_add(offset) {
18 Some(Self(address))
19 } else {
20 None
21 }
22 }
23
24 pub const fn identity_va(self) -> VirtualAddress {
25 VirtualAddress(self.0)
26 }
27
28 pub const fn diff(self, rhs: Self) -> Option<usize> {
29 self.0.checked_sub(rhs.0)
30 }
31}
32
33impl From<PhysicalAddress> for usize {
34 fn from(value: PhysicalAddress) -> Self {
35 value.0
36 }
37}
38
39impl From<PhysicalAddress> for u64 {
40 fn from(value: PhysicalAddress) -> Self {
41 value.0 as u64
42 }
43}
44
/// A virtual memory address.
///
/// Newtype wrapper over `usize`. Construction goes through the `unsafe`
/// [`VirtualAddress::new`] so arbitrary integers are not silently treated as
/// valid virtual addresses. The inner value is `pub(super)` so sibling
/// modules of the translation code can access the raw address.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
pub struct VirtualAddress(pub(super) usize);
47
48impl VirtualAddress {
49 pub const unsafe fn new(address: usize) -> Self {
50 Self(address)
51 }
52
53 pub const fn add_offset(self, offset: usize) -> Option<Self> {
54 if let Some(address) = self.0.checked_add(offset) {
55 Some(Self(address))
56 } else {
57 None
58 }
59 }
60
61 pub const fn identity_pa(self) -> PhysicalAddress {
62 PhysicalAddress(self.0)
63 }
64
Imre Kis631127d2024-11-21 13:09:01 +010065 pub const fn mask_for_level<const VA_BITS: usize>(
66 self,
67 translation_granule: TranslationGranule<VA_BITS>,
68 level: isize,
69 ) -> Self {
70 Self(self.0 & (translation_granule.block_size_at_level(level) - 1))
Imre Kisd5b96fd2024-09-11 17:04:32 +020071 }
72
Imre Kis631127d2024-11-21 13:09:01 +010073 pub const fn get_level_index<const VA_BITS: usize>(
74 self,
75 translation_granule: TranslationGranule<VA_BITS>,
76 level: isize,
77 ) -> usize {
78 self.0 >> translation_granule.total_bits_at_level(level)
Imre Kisd5b96fd2024-09-11 17:04:32 +020079 }
80
81 pub const fn mask_bits(self, mask: usize) -> Self {
82 Self(self.0 & mask)
83 }
84
85 pub const fn diff(self, rhs: Self) -> Option<usize> {
86 self.0.checked_sub(rhs.0)
87 }
Imre Kisf0370e82024-11-18 16:24:55 +010088
89 pub const fn align_up(self, alignment: usize) -> Self {
90 Self(self.0.next_multiple_of(alignment))
91 }
Imre Kisd5b96fd2024-09-11 17:04:32 +020092}
93
94impl From<VirtualAddress> for usize {
95 fn from(value: VirtualAddress) -> Self {
96 value.0
97 }
98}
99
100impl From<VirtualAddress> for u64 {
101 fn from(value: VirtualAddress) -> Self {
102 value.0 as u64
103 }
104}
105
/// A half-open range of virtual addresses: `[start, end)`.
pub struct VirtualAddressRange {
    // Inclusive lower bound of the range.
    pub(super) start: VirtualAddress,
    // Exclusive upper bound of the range.
    pub(super) end: VirtualAddress,
}
110
111impl VirtualAddressRange {
112 pub fn new(start: VirtualAddress, end: VirtualAddress) -> Self {
113 Self { start, end }
114 }
115
116 pub unsafe fn from_range(value: Range<usize>) -> Self {
117 Self::new(
118 VirtualAddress::new(value.start),
119 VirtualAddress::new(value.end),
120 )
121 }
122
123 pub fn len(&self) -> Option<usize> {
124 self.end.diff(self.start)
125 }
126
127 pub fn step_by(self, step: usize) -> VirtualAddressIterator {
128 VirtualAddressIterator {
129 next: self.start,
130 end: self.end,
131 step,
132 }
133 }
134}
135
/// Iterator over the addresses of a `VirtualAddressRange`, produced by
/// `VirtualAddressRange::step_by`.
pub struct VirtualAddressIterator {
    // Next address to yield; advanced by `step` each iteration.
    next: VirtualAddress,
    // Exclusive upper bound; iteration stops once `next` reaches it.
    end: VirtualAddress,
    // Step size in bytes between yielded addresses.
    step: usize,
}
141
142impl Iterator for VirtualAddressIterator {
143 type Item = VirtualAddress;
144
145 fn next(&mut self) -> Option<Self::Item> {
146 if self.next < self.end {
147 let current = self.next;
148
149 self.next = if let Some(next) = self.next.add_offset(self.step) {
150 next
151 } else {
152 self.end
153 };
154
155 Some(current)
156 } else {
157 None
158 }
159 }
160}