// SPDX-FileCopyrightText: Copyright 2024 Arm Limited and/or its affiliates <open-source-office@arm.com>
// SPDX-License-Identifier: MIT OR Apache-2.0

use core::ops::Range;

use super::Xlat;

/// Physical address of a memory area.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
pub struct PhysicalAddress(pub(super) usize);

impl PhysicalAddress {
    /// Creates a new physical address.
    ///
    /// # Safety
    /// The caller must ensure that `address` is a valid physical address.
    pub const unsafe fn new(address: usize) -> Self {
        Self(address)
    }

    /// Adds an offset to the address, returning `None` on overflow.
    pub const fn add_offset(self, offset: usize) -> Option<Self> {
        if let Some(address) = self.0.checked_add(offset) {
            Some(Self(address))
        } else {
            None
        }
    }

    /// Returns the identity mapped virtual address, i.e. the same numerical value.
    pub const fn identity_va(self) -> VirtualAddress {
        VirtualAddress(self.0)
    }

    /// Returns the distance from `rhs`, or `None` if `rhs` is greater than `self`.
    pub const fn diff(self, rhs: Self) -> Option<usize> {
        self.0.checked_sub(rhs.0)
    }
}

impl From<PhysicalAddress> for usize {
    fn from(value: PhysicalAddress) -> Self {
        value.0
    }
}

impl From<PhysicalAddress> for u64 {
    fn from(value: PhysicalAddress) -> Self {
        value.0 as u64
    }
}

/// Virtual address of a memory area.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
pub struct VirtualAddress(pub(super) usize);

impl VirtualAddress {
    /// Creates a new virtual address.
    ///
    /// # Safety
    /// The caller must ensure that `address` is a valid virtual address.
    pub const unsafe fn new(address: usize) -> Self {
        Self(address)
    }

    /// Adds an offset to the address, returning `None` on overflow.
    pub const fn add_offset(self, offset: usize) -> Option<Self> {
        if let Some(address) = self.0.checked_add(offset) {
            Some(Self(address))
        } else {
            None
        }
    }

    /// Returns the identity mapped physical address, i.e. the same numerical value.
    pub const fn identity_pa(self) -> PhysicalAddress {
        PhysicalAddress(self.0)
    }

    /// Masks the address to the offset within a block of the given translation level.
    pub const fn mask_for_level(self, level: usize) -> Self {
        Self(self.0 & (Xlat::GRANULE_SIZES[level] - 1))
    }

    /// Returns the index of the block of the given translation level that contains the address.
    pub const fn get_level_index(self, level: usize) -> usize {
        self.0 / Xlat::GRANULE_SIZES[level]
    }

    /// Masks the address with the given bit mask.
    pub const fn mask_bits(self, mask: usize) -> Self {
        Self(self.0 & mask)
    }

    /// Returns the distance from `rhs`, or `None` if `rhs` is greater than `self`.
    pub const fn diff(self, rhs: Self) -> Option<usize> {
        self.0.checked_sub(rhs.0)
    }

    /// Aligns the address up to the given alignment.
    pub const fn align_up(self, alignment: usize) -> Self {
        Self(self.0.next_multiple_of(alignment))
    }
}

impl From<VirtualAddress> for usize {
    fn from(value: VirtualAddress) -> Self {
        value.0
    }
}

impl From<VirtualAddress> for u64 {
    fn from(value: VirtualAddress) -> Self {
        value.0 as u64
    }
}

/// Virtual address range with an inclusive start and an exclusive end address.
pub struct VirtualAddressRange {
    pub(super) start: VirtualAddress,
    pub(super) end: VirtualAddress,
}

impl VirtualAddressRange {
    /// Creates a new virtual address range.
    pub fn new(start: VirtualAddress, end: VirtualAddress) -> Self {
        Self { start, end }
    }

    /// Creates a new virtual address range from a `Range<usize>`.
    ///
    /// # Safety
    /// The caller must ensure that the range contains valid virtual addresses.
    pub unsafe fn from_range(value: Range<usize>) -> Self {
        Self::new(
            VirtualAddress::new(value.start),
            VirtualAddress::new(value.end),
        )
    }

    /// Returns the length of the range, or `None` if the end is lower than the start.
    pub fn len(&self) -> Option<usize> {
        self.end.diff(self.start)
    }

    /// Returns an iterator which steps through the range by `step` bytes.
    pub fn step_by(self, step: usize) -> VirtualAddressIterator {
        VirtualAddressIterator {
            next: self.start,
            end: self.end,
            step,
        }
    }
}

/// Iterator which yields the virtual addresses of a range, advancing by a fixed step.
pub struct VirtualAddressIterator {
    next: VirtualAddress,
    end: VirtualAddress,
    step: usize,
}

impl Iterator for VirtualAddressIterator {
    type Item = VirtualAddress;

    fn next(&mut self) -> Option<Self::Item> {
        if self.next < self.end {
            let current = self.next;

            // Saturate at the end of the range if stepping forward would overflow.
            self.next = if let Some(next) = self.next.add_offset(self.step) {
                next
            } else {
                self.end
            };

            Some(current)
        } else {
            None
        }
    }
}
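
// A minimal usage sketch, assuming this module builds as part of its parent xlat
// crate and that a host-side test run is available; the test names and address
// values below are illustrative and not part of the original file. It exercises
// `add_offset`, `align_up` and the range iterator, and deliberately avoids
// `Xlat::GRANULE_SIZES`, whose values are not visible in this module.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn address_arithmetic_sketch() {
        // SAFETY: the raw address is only used for arithmetic in this test.
        let va = unsafe { VirtualAddress::new(0x1234) };

        // Offsets are checked, so overflow yields `None` instead of wrapping.
        assert_eq!(va.add_offset(0x10), Some(unsafe { VirtualAddress::new(0x1244) }));
        assert_eq!(va.add_offset(usize::MAX), None);

        // `align_up` rounds up to the next multiple of the alignment.
        assert_eq!(va.align_up(0x1000), unsafe { VirtualAddress::new(0x2000) });
    }

    #[test]
    fn range_iteration_sketch() {
        // SAFETY: the addresses are only iterated over, never dereferenced.
        let range = unsafe { VirtualAddressRange::from_range(0x0000..0x3000) };

        // Stepping by 0x1000 yields the start address of each page-sized chunk.
        let mut pages = range.step_by(0x1000).map(|va| usize::from(va));
        assert_eq!(pages.next(), Some(0x0000));
        assert_eq!(pages.next(), Some(0x1000));
        assert_eq!(pages.next(), Some(0x2000));
        assert_eq!(pages.next(), None);
    }
}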