blob: f0da51f6170acd9859cf623886f668b7c14687c4 [file] [log] [blame]
Balint Dobszay5bf492f2024-07-29 17:21:32 +02001// SPDX-FileCopyrightText: Copyright 2023 Arm Limited and/or its affiliates <open-source-office@arm.com>
2// SPDX-License-Identifier: MIT OR Apache-2.0
3
4use alloc::vec::Vec;
5
/// 64-bit memory transaction handle used by the FF-A memory management
/// descriptors (see [`MemTransactionDesc`], [`MemRelinquishDesc`]).
/// Newtype over the raw value; it is transferred as two 32-bit register
/// halves, hence the `[u32; 2]` conversions below.
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct Handle(pub u64);
8
9impl From<[u32; 2]> for Handle {
10 fn from(value: [u32; 2]) -> Self {
11 Self((value[1] as u64) << 32 | value[0] as u64)
12 }
13}
14
15impl From<Handle> for [u32; 2] {
16 fn from(value: Handle) -> Self {
17 [value.0 as u32, (value.0 >> 32) as u32]
18 }
19}
20
impl Handle {
    /// All-ones marker for an invalid/absent handle.
    /// NOTE(review): this is a bare `u64`, not a `Handle`, so callers must
    /// write `Handle(Handle::INVALID)` to compare — consider typing it as
    /// `Handle` in a future API revision.
    pub const INVALID: u64 = 0xffff_ffff_ffff_ffff;
}
24
/// Cacheability attribute of a Normal memory region (bits[3:2] of the memory
/// region attributes value). Discriminants are pre-shifted into field
/// position, so `as u16` yields the encoded bits directly.
#[derive(Debug, Default, Clone, Copy, PartialEq)]
#[repr(u16)]
pub enum Cacheability {
    /// Encoding 0b01: non-cacheable.
    #[default]
    NonCacheable = Self::NON_CACHEABLE << Self::SHIFT,
    /// Encoding 0b11: write-back cacheable.
    WriteBack = Self::WRITE_BACK << Self::SHIFT,
}
32
33impl TryFrom<u16> for Cacheability {
34 type Error = ();
35
36 fn try_from(value: u16) -> Result<Self, Self::Error> {
37 match (value >> Self::SHIFT) & Self::MASK {
38 Self::NON_CACHEABLE => Ok(Cacheability::NonCacheable),
39 Self::WRITE_BACK => Ok(Cacheability::WriteBack),
40 _ => Err(()),
41 }
42 }
43}
44
impl Cacheability {
    /// Bit position of the cacheability field in the attributes value.
    const SHIFT: usize = 2;
    /// Field width mask (2 bits), applied after shifting down.
    const MASK: u16 = 0b11;
    /// Raw encoding: non-cacheable.
    const NON_CACHEABLE: u16 = 0b01;
    /// Raw encoding: write-back cacheable.
    const WRITE_BACK: u16 = 0b11;
}
51
/// Shareability attribute of a Normal memory region (bits[1:0] of the memory
/// region attributes value). Discriminants are pre-shifted into field
/// position (shift is 0 here, kept for symmetry with the other fields).
#[derive(Debug, Default, Clone, Copy, PartialEq)]
#[repr(u16)]
pub enum Shareability {
    /// Encoding 0b00: non-shareable.
    #[default]
    NonShareable = Self::NON_SHAREABLE << Self::SHIFT,
    /// Encoding 0b10: outer shareable.
    Outer = Self::OUTER << Self::SHIFT,
    /// Encoding 0b11: inner shareable.
    Inner = Self::INNER << Self::SHIFT,
}
60
61impl TryFrom<u16> for Shareability {
62 type Error = ();
63
64 fn try_from(value: u16) -> Result<Self, Self::Error> {
65 match (value >> Self::SHIFT) & Self::MASK {
66 Self::NON_SHAREABLE => Ok(Self::NonShareable),
67 Self::OUTER => Ok(Self::Outer),
68 Self::INNER => Ok(Self::Inner),
69 _ => Err(()),
70 }
71 }
72}
73
impl Shareability {
    /// Bit position of the shareability field in the attributes value.
    const SHIFT: usize = 0;
    /// Field width mask (2 bits).
    const MASK: u16 = 0b11;
    /// Raw encoding: non-shareable.
    const NON_SHAREABLE: u16 = 0b00;
    /// Raw encoding: outer shareable.
    const OUTER: u16 = 0b10;
    /// Raw encoding: inner shareable.
    const INNER: u16 = 0b11;
}
81
/// Attributes of a Device memory region (bits[3:2] of the memory region
/// attributes value, meaningful when the memory type field is Device).
///
/// Variant names follow the Arm memory-type notation: G = Gathering,
/// R = Reordering, E = Early write acknowledgement; a lowercase 'n' negates
/// the property that follows it. Discriminants are pre-shifted into field
/// position.
#[derive(Debug, Default, Clone, Copy)]
#[repr(u16)]
pub enum DeviceMemAttributes {
    /// Device-nGnRnE (most restrictive).
    #[default]
    DevnGnRnE = Self::DEV_NGNRNE << Self::SHIFT,
    /// Device-nGnRE.
    DevnGnRE = Self::DEV_NGNRE << Self::SHIFT,
    /// Device-nGRE.
    DevnGRE = Self::DEV_NGRE << Self::SHIFT,
    /// Device-GRE (least restrictive).
    DevGRE = Self::DEV_GRE << Self::SHIFT,
}
91
92impl TryFrom<u16> for DeviceMemAttributes {
93 type Error = ();
94
95 fn try_from(value: u16) -> Result<Self, Self::Error> {
96 // TODO: sanity check if it's device memory
97 match (value >> Self::SHIFT) & Self::MASK {
98 Self::DEV_NGNRNE => Ok(Self::DevnGnRnE),
99 Self::DEV_NGNRE => Ok(Self::DevnGnRE),
100 Self::DEV_NGRE => Ok(Self::DevnGRE),
101 Self::DEV_GRE => Ok(Self::DevGRE),
102 _ => Err(()),
103 }
104 }
105}
106
impl DeviceMemAttributes {
    /// Bit position of the device attributes field in the attributes value.
    const SHIFT: usize = 2;
    /// Field width mask (2 bits).
    const MASK: u16 = 0b11;
    /// Raw encoding: Device-nGnRnE.
    const DEV_NGNRNE: u16 = 0b00;
    /// Raw encoding: Device-nGnRE.
    const DEV_NGNRE: u16 = 0b01;
    /// Raw encoding: Device-nGRE.
    const DEV_NGRE: u16 = 0b10;
    /// Raw encoding: Device-GRE.
    const DEV_GRE: u16 = 0b11;
}
115
/// Memory type field of the memory region attributes (bits[5:4]), carrying
/// the type-specific sub-attributes decoded from the lower bits of the same
/// raw value.
#[derive(Debug, Default, Clone, Copy)]
pub enum MemType {
    /// Memory type left unspecified (encoding 0b00).
    #[default]
    NotSpecified,
    /// Device memory (encoding 0b01) with its attributes from bits[3:2].
    Device(DeviceMemAttributes),
    /// Normal memory (encoding 0b10) with cacheability (bits[3:2]) and
    /// shareability (bits[1:0]).
    Normal {
        cacheability: Cacheability,
        shareability: Shareability,
    },
}
126
127impl TryFrom<u16> for MemType {
128 type Error = ();
129
130 fn try_from(value: u16) -> Result<Self, Self::Error> {
131 match (value >> Self::SHIFT) & Self::MASK {
132 Self::NOT_SPECIFIED => Ok(Self::NotSpecified),
133 Self::DEVICE => Ok(Self::Device(DeviceMemAttributes::try_from(value)?)),
134 Self::NORMAL => Ok(Self::Normal {
135 cacheability: Cacheability::try_from(value)?,
136 shareability: Shareability::try_from(value)?,
137 }),
138 _ => Err(()),
139 }
140 }
141}
142
143impl From<MemType> for u16 {
144 fn from(value: MemType) -> Self {
145 match value {
146 MemType::NotSpecified => MemType::NOT_SPECIFIED << MemType::SHIFT,
147 MemType::Device(attr) => attr as u16 | MemType::DEVICE << MemType::SHIFT,
148 MemType::Normal {
149 cacheability,
150 shareability,
151 } => cacheability as u16 | shareability as u16 | MemType::NORMAL << MemType::SHIFT,
152 }
153 }
154}
155
impl MemType {
    /// Bit position of the memory type field in the attributes value.
    const SHIFT: usize = 4;
    /// Field width mask (2 bits).
    const MASK: u16 = 0b11;
    /// Raw encoding: type not specified.
    const NOT_SPECIFIED: u16 = 0b00;
    /// Raw encoding: device memory.
    const DEVICE: u16 = 0b01;
    /// Raw encoding: normal memory.
    const NORMAL: u16 = 0b10;
}
163
/// Security state of a memory region (bit[6] of the memory region attributes
/// value). Discriminants are pre-shifted into field position.
#[derive(Debug, Default, Clone, Copy, PartialEq)]
#[repr(u16)]
pub enum MemRegionSecurity {
    /// Encoding 0b0: secure memory.
    #[default]
    Secure = Self::SECURE << Self::SHIFT,
    /// Encoding 0b1: non-secure memory.
    NonSecure = Self::NON_SECURE << Self::SHIFT,
}
171
172impl TryFrom<u16> for MemRegionSecurity {
173 type Error = ();
174
175 fn try_from(value: u16) -> Result<Self, Self::Error> {
176 match (value >> Self::SHIFT) & Self::MASK {
177 Self::SECURE => Ok(Self::Secure),
178 Self::NON_SECURE => Ok(Self::NonSecure),
179 _ => Err(()),
180 }
181 }
182}
183
impl MemRegionSecurity {
    /// Bit position of the security field in the attributes value.
    const SHIFT: usize = 6;
    /// Field width mask (1 bit).
    const MASK: u16 = 0b1;
    /// Raw encoding: secure.
    const SECURE: u16 = 0b0;
    /// Raw encoding: non-secure.
    const NON_SECURE: u16 = 0b1;
}
190
/// FF-A v1.1: Table 10.18: Memory region attributes descriptor
///
/// Decoded form of the 16-bit attributes value: security state (bit[6]) plus
/// the memory type and its sub-attributes (bits[5:0]). Bits[15:7] are
/// reserved.
#[derive(Debug, Default, Clone, Copy)]
pub struct MemRegionAttributes {
    /// Secure / non-secure state of the region.
    pub security: MemRegionSecurity,
    /// Memory type together with its type-specific attributes.
    pub mem_type: MemType,
}
197
198impl TryFrom<u16> for MemRegionAttributes {
199 type Error = ();
200
201 fn try_from(value: u16) -> Result<Self, Self::Error> {
202 // bits[15:7]: Reserved (MBZ)
203 assert_eq!(value >> 7, 0);
204 Ok(Self {
205 security: MemRegionSecurity::try_from(value)?,
206 mem_type: MemType::try_from(value)?,
207 })
208 }
209}
210
211impl From<MemRegionAttributes> for u16 {
212 fn from(value: MemRegionAttributes) -> Self {
213 value.security as u16 | u16::from(value.mem_type)
214 }
215}
216
/// Instruction access permission (bits[3:2] of the memory access permissions
/// byte). Discriminants are pre-shifted into field position.
///
/// NOTE(review): the public name is missing an 'r' ("Instuction"); renaming
/// to `InstructionAccessPerm` would break the public API, so it is only
/// flagged here.
#[derive(Debug, Default, Clone, Copy)]
#[repr(u8)]
pub enum InstuctionAccessPerm {
    /// Encoding 0b00: not specified.
    #[default]
    NotSpecified = Self::NOT_SPECIFIED << Self::SHIFT,
    /// Encoding 0b01: execution not permitted.
    NotExecutable = Self::NOT_EXECUTABLE << Self::SHIFT,
    /// Encoding 0b10: execution permitted.
    Executable = Self::EXECUTABLE << Self::SHIFT,
}
225
226impl TryFrom<u8> for InstuctionAccessPerm {
227 type Error = ();
228
229 fn try_from(value: u8) -> Result<Self, Self::Error> {
230 match (value >> Self::SHIFT) & Self::MASK {
231 Self::NOT_SPECIFIED => Ok(Self::NotSpecified),
232 Self::NOT_EXECUTABLE => Ok(Self::NotExecutable),
233 Self::EXECUTABLE => Ok(Self::Executable),
234 _ => Err(()),
235 }
236 }
237}
238
impl InstuctionAccessPerm {
    /// Bit position of the instruction access field in the permissions byte.
    const SHIFT: usize = 2;
    /// Field width mask (2 bits).
    const MASK: u8 = 0b11;
    /// Raw encoding: not specified.
    const NOT_SPECIFIED: u8 = 0b00;
    /// Raw encoding: not executable.
    const NOT_EXECUTABLE: u8 = 0b01;
    /// Raw encoding: executable.
    const EXECUTABLE: u8 = 0b10;
}
246
/// Data access permission (bits[1:0] of the memory access permissions byte).
/// Discriminants are pre-shifted into field position (shift is 0 here).
#[derive(Debug, Default, Clone, Copy)]
#[repr(u8)]
pub enum DataAccessPerm {
    /// Encoding 0b00: not specified.
    #[default]
    NotSpecified = Self::NOT_SPECIFIED << Self::SHIFT,
    /// Encoding 0b01: read-only access.
    ReadOnly = Self::READ_ONLY << Self::SHIFT,
    /// Encoding 0b10: read-write access.
    ReadWrite = Self::READ_WRITE << Self::SHIFT,
}
255
256impl TryFrom<u8> for DataAccessPerm {
257 type Error = ();
258
259 fn try_from(value: u8) -> Result<Self, Self::Error> {
260 match (value >> Self::SHIFT) & Self::MASK {
261 Self::NOT_SPECIFIED => Ok(Self::NotSpecified),
262 Self::READ_ONLY => Ok(Self::ReadOnly),
263 Self::READ_WRITE => Ok(Self::ReadWrite),
264 _ => Err(()),
265 }
266 }
267}
268
impl DataAccessPerm {
    /// Bit position of the data access field in the permissions byte.
    const SHIFT: usize = 0;
    /// Field width mask (2 bits).
    const MASK: u8 = 0b11;
    /// Raw encoding: not specified.
    const NOT_SPECIFIED: u8 = 0b00;
    /// Raw encoding: read-only.
    const READ_ONLY: u8 = 0b01;
    /// Raw encoding: read-write.
    const READ_WRITE: u8 = 0b10;
}
276
/// FF-A v1.1: Table 10.15: Memory access permissions descriptor
#[derive(Debug, Default, Clone, Copy)]
pub struct MemAccessPermDesc {
    /// ID of the endpoint these permissions apply to.
    pub endpoint_id: u16,
    /// Instruction access permission (packed into bits[3:2] on the wire).
    pub instr_access: InstuctionAccessPerm,
    /// Data access permission (packed into bits[1:0] on the wire).
    pub data_access: DataAccessPerm,
    /// ABI-specific flags byte, stored raw. TODO
    pub flags: u8, // TODO
}
285
/// FF-A v1.1 Table 10.16: Endpoint memory access descriptor
#[derive(Debug, Default, Clone, Copy)]
pub struct EndpointMemAccessDesc {
    /// Access permissions for one endpoint.
    pub mem_access_perm: MemAccessPermDesc,
    /// Offset from the start of the transaction descriptor to the composite
    /// memory region descriptor these permissions apply to (0 if absent).
    pub composite_offset: u32,
}
292
impl EndpointMemAccessDesc {
    /// Serialized size in bytes: id (2) + perms (1) + flags (1) +
    /// composite offset (4) + reserved (8).
    const SIZE: usize = 16;
}
296
/// FF-A v1.1 Table 10.21: Flags usage in FFA_MEM_DONATE, FFA_MEM_LEND and FFA_MEM_SHARE ABIs
/// FF-A v1.1 Table 10.22: Flags usage in FFA_MEM_RETRIEVE_REQ ABI
/// FF-A v1.1 Table 10.23: Flags usage in FFA_MEM_RETRIEVE_RESP ABI
///
/// Raw 32-bit flags field of the memory transaction descriptor; the valid
/// bits depend on which ABI the descriptor is used with (see the masks below).
#[derive(Debug, Default, Clone, Copy)]
pub struct MemTransactionFlags(pub u32); // TODO: use bitflags?
302
#[allow(dead_code)]
impl MemTransactionFlags {
    /// Bits valid for FFA_MEM_SHARE/LEND/DONATE (Table 10.21).
    const MEM_SHARE_MASK: u32 = 0b11;
    /// Bits valid for FFA_MEM_RETRIEVE_REQ (Table 10.22).
    const MEM_RETRIEVE_REQ_MASK: u32 = 0b11_1111_1111;
    /// Bits valid for FFA_MEM_RETRIEVE_RESP (Table 10.23).
    const MEM_RETRIEVE_RESP_MASK: u32 = 0b1_1111;
    /// Bit 0: zero the memory region before the operation.
    const ZERO_MEMORY: u32 = 0b1;
    /// Bit 1: operation may be time-sliced.
    const TIME_SLICING: u32 = 0b1 << 1;
    /// Bit 2: zero the memory after it is relinquished.
    const ZERO_AFTER_RELINQ: u32 = 0b1 << 2;
    /// Bits[4:3] = 0b01: transaction type is share.
    pub const TYPE_SHARE: u32 = 0b01 << 3;
    /// Bits[4:3] = 0b10: transaction type is lend.
    const TYPE_LEND: u32 = 0b10 << 3;
    /// Bits[4:3] = 0b11: transaction type is donate.
    const TYPE_DONATE: u32 = 0b11 << 3;
    /// Bits[8:5]: address range alignment hint.
    const ALIGN_HINT_MASK: u32 = 0b1111 << 5;
    /// Bit 9: the alignment hint field is valid.
    const HINT_VALID: u32 = 0b1 << 9;
}
317
/// FF-A v1.1: Table 10.20: Memory transaction descriptor
#[derive(Debug, Default)]
pub struct MemTransactionDesc {
    /// ID of the Owner endpoint (the sender).
    pub sender_id: u16,
    /// Attributes of the memory region being transferred.
    pub mem_region_attr: MemRegionAttributes,
    /// ABI-dependent transaction flags.
    pub flags: MemTransactionFlags,
    /// Transaction handle; zero when the sender supplies the region
    /// description itself (see `parse`).
    pub handle: Handle,
    /// Sender/receiver-agreed tag attached to the transaction. TODO
    pub tag: u64, // TODO
    /// One endpoint memory access descriptor per receiver.
    pub ep_access_descs: Vec<EndpointMemAccessDesc>,
}
328
/// FF-A v1.1 Table 10.13: Composite memory region descriptor
#[derive(Debug, Default)]
pub struct CompositeMemRegionDesc {
    /// Size of the region as a count of 4K pages; must equal the sum of the
    /// constituents' page counts (checked in `MemTransactionDesc::parse`).
    pub total_page_cnt: u32,
    /// The address ranges making up the region.
    pub constituents: Vec<ConstituentMemRegionDesc>,
}
335
impl CompositeMemRegionDesc {
    /// Offset of the constituent array from the start of the composite
    /// descriptor: page count (4) + range count (4) + reserved (8).
    const CONSTITUENT_ARRAY_OFFSET: usize = 16;
}
339
/// FF-A v1.1 Table 10.14: Constituent memory region descriptor
#[derive(Debug, Default, Clone, Copy)]
pub struct ConstituentMemRegionDesc {
    /// Base VA, PA or IPA of the range, 4K aligned.
    pub address: u64,
    /// Number of 4K pages in the range.
    pub page_cnt: u32,
}
346
impl ConstituentMemRegionDesc {
    /// Serialized size in bytes: address (8) + page count (4) + reserved (4).
    const SIZE: usize = 16;
}
350
351impl MemTransactionDesc {
352 // Must be 16 byte aligned
353 const ENDPOINT_MEM_ACCESS_DESC_OFFSET: usize = 48;
354
355 pub fn create(&self, composite_desc: &CompositeMemRegionDesc, buf: &mut [u8]) -> usize {
356 let mem_access_desc_cnt = self.ep_access_descs.len();
357 let composite_offset = (Self::ENDPOINT_MEM_ACCESS_DESC_OFFSET
358 + mem_access_desc_cnt * EndpointMemAccessDesc::SIZE)
359 .next_multiple_of(8);
360
361 // Offset 0, length 2: ID of the Owner endpoint.
362 buf[0..2].copy_from_slice(&self.sender_id.to_le_bytes());
363
364 // Offset 2, length 2: Memory region attributes
365 let mem_reg_attr = u16::from(self.mem_region_attr);
366 buf[2..4].copy_from_slice(&mem_reg_attr.to_le_bytes());
367
368 // Offset 4, length 4: Flags
369 buf[4..8].copy_from_slice(&self.flags.0.to_le_bytes());
370
371 // Offset 8, length 8: Handle
372 buf[8..16].copy_from_slice(&self.handle.0.to_le_bytes());
373
374 // Offset 16, length 8: Tag
375 buf[16..24].copy_from_slice(&self.tag.to_le_bytes());
376
377 // Offset 24, length 4: Size of each endpoint memory access descriptor in the array.
378 buf[24..28].copy_from_slice(&(EndpointMemAccessDesc::SIZE as u32).to_le_bytes());
379
380 // Offset 28, length 4: Count of endpoint memory access descriptors.
381 buf[28..32].copy_from_slice(&(mem_access_desc_cnt as u32).to_le_bytes());
382
383 // Offset 32, length 4: 16-byte aligned offset from the base address of this descriptor to the first element of the Endpoint memory access descriptor array.
384 buf[32..36].copy_from_slice(&(Self::ENDPOINT_MEM_ACCESS_DESC_OFFSET as u32).to_le_bytes());
385
386 let mut offset = Self::ENDPOINT_MEM_ACCESS_DESC_OFFSET;
387 for desc in &self.ep_access_descs {
388 // Offset 0, length 4: Memory access permissions descriptor
389 // Offset 0, length 2: 16-bit ID of endpoint to which the memory access permissions apply
390 buf[offset..offset + 2]
391 .copy_from_slice(&desc.mem_access_perm.endpoint_id.to_le_bytes());
392
393 // Offset 2, length 1: Permissions used to access a memory region.
394 buf[offset + 2] =
395 desc.mem_access_perm.data_access as u8 | desc.mem_access_perm.instr_access as u8;
396
397 // Offset 3, length 1: ABI specific flags
398 buf[offset + 2] = desc.mem_access_perm.flags;
399
400 // Offset 4, length 4: Offset to the composite memory region descriptor to which the endpoint access permissions apply
401 buf[offset + 4..offset + 8].copy_from_slice(&(composite_offset as u32).to_le_bytes());
402
403 // Offset 8, length 8: Reserved (MBZ)
404 buf[offset + 8..offset + 16].fill(0);
405
406 offset += EndpointMemAccessDesc::SIZE;
407 }
408
409 offset = composite_offset;
410 // Offset 0, length 4: Size of the memory region described as the count of 4K pages
411 buf[offset..offset + 4].copy_from_slice(&composite_desc.total_page_cnt.to_le_bytes());
412
413 // Offset 4, length 4: Count of address ranges specified using constituent memory region descriptors
414 let addr_range_cnt = composite_desc.constituents.len() as u32;
415 buf[offset + 4..offset + 8].copy_from_slice(&addr_range_cnt.to_le_bytes());
416
417 // Offset 8, length 8: Reserved (MBZ)
418 buf[offset + 8..offset + 16].fill(0);
419
420 offset = composite_offset + CompositeMemRegionDesc::CONSTITUENT_ARRAY_OFFSET;
421 for constituent in &composite_desc.constituents {
422 // Offset 0, length 8: Base VA, PA or IPA of constituent memory region aligned to the page size (4K) granularity.
423 buf[offset..offset + 8].copy_from_slice(&constituent.address.to_le_bytes());
424
425 // Offset 8, length 4: Number of 4K pages in constituent memory region
426 buf[offset + 8..offset + 12].copy_from_slice(&constituent.page_cnt.to_le_bytes());
427
428 // Offset 12, length 4: Reserved (MBZ)
429 buf[offset + 12..offset + 16].fill(0);
430
431 offset += ConstituentMemRegionDesc::SIZE;
432 }
433
434 offset
435 }
436
437 pub fn parse(
438 &mut self,
439 composite_desc: &mut CompositeMemRegionDesc,
440 buf: &[u8],
441 ) -> Result<(), ()> {
442 // Offset 0, length 2: ID of the Owner endpoint.
443 self.sender_id = u16::from_le_bytes(buf[0..2].try_into().unwrap());
444
445 // Offset 2, length 2: Memory region attributes
446 let mem_attr = u16::from_le_bytes(buf[2..4].try_into().unwrap());
447 self.mem_region_attr = MemRegionAttributes::try_from(mem_attr)?;
448
449 // Offset 4, length 4: Flags
450 self.flags.0 = u32::from_le_bytes(buf[4..8].try_into().unwrap()); // TODO: validate
451
452 // Offset 8, length 8: Handle
453 self.handle.0 = u64::from_le_bytes(buf[8..16].try_into().unwrap());
454
455 // Offset 16, length 8: Tag
456 self.tag = u64::from_le_bytes(buf[16..24].try_into().unwrap());
457
458 // Offset 24, length 4: Size of each endpoint memory access descriptor in the array.
459 let endpoint_mem_access_desc_size = u32::from_le_bytes(buf[24..28].try_into().unwrap());
460 assert_eq!(
461 EndpointMemAccessDesc::SIZE,
462 endpoint_mem_access_desc_size as usize
463 );
464
465 // Offset 28, length 4: Count of endpoint memory access descriptors.
466 let endpoint_mem_access_desc_cnt = u32::from_le_bytes(buf[28..32].try_into().unwrap());
467
468 // Offset 32, length 4: 16-byte aligned offset from the base address of this descriptor to
469 // the first element of the Endpoint memory access descriptor array.
470 let endpoint_mem_access_desc_offset = u32::from_le_bytes(buf[32..36].try_into().unwrap());
471
472 assert!(
473 endpoint_mem_access_desc_offset
474 + endpoint_mem_access_desc_cnt * endpoint_mem_access_desc_size
475 <= buf.len() as u32
476 );
477
478 let mut composite_offset = 0;
479 let mut offset = endpoint_mem_access_desc_offset as usize;
480 for _ in 0..endpoint_mem_access_desc_cnt {
481 let mut desc = EndpointMemAccessDesc::default();
482 desc.mem_access_perm.endpoint_id =
483 u16::from_le_bytes(buf[offset..offset + 2].try_into().unwrap());
484
485 desc.mem_access_perm.instr_access = InstuctionAccessPerm::try_from(buf[offset + 2])?;
486 desc.mem_access_perm.data_access = DataAccessPerm::try_from(buf[offset + 2])?;
487 desc.mem_access_perm.flags = buf[offset + 3];
488 desc.composite_offset =
489 u32::from_le_bytes(buf[offset + 4..offset + 8].try_into().unwrap());
490 // TODO: different composite offsets?
491 composite_offset = desc.composite_offset as usize;
492
493 self.ep_access_descs.push(desc);
494
495 offset += endpoint_mem_access_desc_size as usize;
496 }
497
498 if self.handle != Handle(0) || composite_offset == 0 {
499 return Ok(());
500 }
501
502 composite_desc.total_page_cnt = u32::from_le_bytes(
503 buf[composite_offset..composite_offset + 4]
504 .try_into()
505 .unwrap(),
506 );
507
508 let addr_range_cnt = u32::from_le_bytes(
509 buf[composite_offset + 4..composite_offset + 8]
510 .try_into()
511 .unwrap(),
512 );
513
514 offset = composite_offset + CompositeMemRegionDesc::CONSTITUENT_ARRAY_OFFSET;
515 let mut total_page_cnt = 0;
516 for _ in 0..addr_range_cnt {
517 let desc = ConstituentMemRegionDesc {
518 address: u64::from_le_bytes(buf[offset..offset + 8].try_into().unwrap()),
519 page_cnt: u32::from_le_bytes(buf[offset + 8..offset + 12].try_into().unwrap()),
520 };
521 total_page_cnt += desc.page_cnt;
522
523 composite_desc.constituents.push(desc);
524
525 offset += ConstituentMemRegionDesc::SIZE;
526 }
527
528 assert_eq!(total_page_cnt, composite_desc.total_page_cnt);
529
530 Ok(())
531 }
532}
533
/// FF-A v1.1 Table 16.25: Descriptor to relinquish a memory region
#[derive(Debug, Default)]
pub struct MemRelinquishDesc {
    /// Handle of the transaction being relinquished.
    pub handle: Handle,
    /// Raw flags field, stored unvalidated.
    pub flags: u32,
    /// IDs of the endpoints relinquishing access.
    pub endpoints: Vec<u16>,
}
541
542impl MemRelinquishDesc {
543 const ENDPOINT_ARRAY_OFFSET: usize = 16;
544
545 pub fn parse(&mut self, buf: &[u8]) -> Result<(), ()> {
546 // Offset 0, length 8: Handle
547 self.handle.0 = u64::from_le_bytes(buf[0..8].try_into().unwrap());
548
549 // Offset 8, length 4: Flags
550 self.flags = u32::from_le_bytes(buf[8..12].try_into().unwrap()); // TODO: validate
551
552 // Offset 12, length 4: Count of endpoint ID entries in the Endpoint array
553 let endpoint_cnt = u32::from_le_bytes(buf[12..16].try_into().unwrap());
554
555 let mut offset = MemRelinquishDesc::ENDPOINT_ARRAY_OFFSET;
556 for _ in 0..endpoint_cnt as usize {
557 let endpoint = u16::from_le_bytes(buf[offset..offset + 2].try_into().unwrap());
558 self.endpoints.push(endpoint);
559 offset += 2;
560 }
561
562 Ok(())
563 }
564}
565
#[cfg(test)]
mod tests {
    use super::*;

    #[allow(dead_code)]
    const MEM_SHARE_FROM_SP1: &[u8] = &[
        0x05, 0x80, 0x2f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
        0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x03, 0x80, 0x02, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf0, 0x10, 0x40, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    ];

    #[allow(dead_code)]
    const MEM_SHARE_FROM_SP2: &[u8] = &[
        0x06, 0x80, 0x2f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
        0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x05, 0x80, 0x02, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x07, 0x40, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    ];

    #[allow(dead_code)]
    const MEM_RETRIEVE_REQ_FROM_SP1: &[u8] = &[
        0x05, 0x80, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
        0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x03, 0x80, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00,
    ];

    #[allow(dead_code)]
    const MEM_RETRIEVE_REQ_FROM_SP2: &[u8] = &[
        0x06, 0x80, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x20, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
        0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x05, 0x80, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00,
    ];

    #[allow(dead_code)]
    const MEM_SHARE_FROM_NWD: &[u8] = &[
        0x00, 0x00, 0x2f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
        0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x03, 0x80, 0x02, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x22, 0x80, 0x08, 0x00, 0x00, 0x00, 0x02, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    ];

    /// Retrieve request: handle is non-zero, so no composite descriptor is
    /// parsed. Expected values were decoded by hand from the LE byte array.
    #[test]
    fn mem_retrieve_req() {
        let mut transaction_desc = MemTransactionDesc::default();
        let mut composite_desc = CompositeMemRegionDesc::default();

        transaction_desc
            .parse(&mut composite_desc, MEM_RETRIEVE_REQ_FROM_SP1)
            .unwrap();

        assert_eq!(transaction_desc.sender_id, 0x8005);
        assert_eq!(transaction_desc.flags.0, MemTransactionFlags::TYPE_SHARE);
        assert_eq!(transaction_desc.handle, Handle(0x2000_0000_0000));
        assert_eq!(transaction_desc.ep_access_descs.len(), 1);

        let perm = &transaction_desc.ep_access_descs[0].mem_access_perm;
        assert_eq!(perm.endpoint_id, 0x8003);
        assert!(matches!(perm.data_access, DataAccessPerm::ReadWrite));
        assert!(matches!(perm.instr_access, InstuctionAccessPerm::NotSpecified));

        // Non-zero handle: the composite region must stay untouched.
        assert!(composite_desc.constituents.is_empty());
    }

    /// Share from SP1: handle is zero, so the composite memory region
    /// descriptor at offset 0x40 is parsed as well.
    #[test]
    fn mem_share() {
        let mut transaction_desc = MemTransactionDesc::default();
        let mut composite_desc = CompositeMemRegionDesc::default();

        transaction_desc
            .parse(&mut composite_desc, MEM_SHARE_FROM_SP1)
            .unwrap();

        println!("transaction desc: {:#x?}", transaction_desc);
        println!("endpont desc: {:#x?}", transaction_desc.ep_access_descs);
        println!("composite desc: {:#x?}", composite_desc);
        println!("constituent desc: {:#x?}", composite_desc.constituents);

        assert_eq!(transaction_desc.sender_id, 0x8005);
        assert_eq!(transaction_desc.handle, Handle(0));
        assert_eq!(transaction_desc.ep_access_descs.len(), 1);
        assert_eq!(
            transaction_desc.ep_access_descs[0].mem_access_perm.endpoint_id,
            0x8003
        );
        assert_eq!(composite_desc.total_page_cnt, 1);
        assert_eq!(composite_desc.constituents.len(), 1);
        assert_eq!(composite_desc.constituents[0].address, 0x4010_f000);
        assert_eq!(composite_desc.constituents[0].page_cnt, 1);
    }
}
635}