feat(ci): Measured boot hash verification scripts
Remove the dependency on external fTPM and OP-TEE binaries by using our
own script to validate the hashes in the TF-A event log during measured
boot tests.
Change-Id: Ifd1fa8ce7d2091510b4c8242e25438e4d9aa61bb
Signed-off-by: Slava Andrianov <slava.andrianov@arm.com>
diff --git a/script/verify_hashes.py b/script/verify_hashes.py
new file mode 100755
index 0000000..71f11b3
--- /dev/null
+++ b/script/verify_hashes.py
@@ -0,0 +1,282 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2025 Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# This script compares the hashes in the TF-A event log against hashes of the
+# files generated by the build system or, in the case of the startup locality
+# and critical data entries, against the expected default values.
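+#
+# The script reads the location of the build artefacts from the
+# "artefacts_dir" environment variable, e.g. (illustrative invocation only):
+#
+#   artefacts_dir=/path/to/artefacts ./verify_hashes.py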
+
+import os
+import hashlib
+from enum import Enum
+from dataclasses import dataclass
+
+# Identifiers used in the event log for the different measured images
+class ImageType(Enum):
+    UNKNOWN = "UNKNOWN"
+    BL2 = "BL_2"
+    BL31 = "SECURE_RT_EL3"
+    NT_FW_CONFIG = "NT_FW_CONFIG"
+    TB_FW_CONFIG = "TB_FW_CONFIG"
+    SOC_FW_CONFIG = "SOC_FW_CONFIG"
+    FW_CONFIG = "FW_CONFIG"
+    BL33 = "BL_33"
+    BL32 = "SECURE_RT_EL1"
+    BL32_EXTRA1 = "EXTRA1"
+    BL32_EXTRA2 = "EXTRA2"
+    STARTUP_LOCALITY = "StartupLocality"
+    CRITICAL_DATA = "CRITICAL DATA"
+
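+# BL32 and its EXTRA1/EXTRA2 payloads are deliberately absent from this map:
+# their event strings contain the SECURE_RT_EL1 marker with an optional
+# EXTRA1/EXTRA2 suffix, so extract_hash() matches them by substring instead.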
+marker_to_image_type = {
+    "BL_2" : ImageType.BL2,
+    "SECURE_RT_EL3" : ImageType.BL31,
+    "NT_FW_CONFIG" : ImageType.NT_FW_CONFIG,
+    "TB_FW_CONFIG" : ImageType.TB_FW_CONFIG,
+    "SOC_FW_CONFIG" : ImageType.SOC_FW_CONFIG,
+    "FW_CONFIG" : ImageType.FW_CONFIG,
+    "BL_33" : ImageType.BL33,
+    "StartupLocality" : ImageType.STARTUP_LOCALITY,
+    "CRITICAL DATA" : ImageType.CRITICAL_DATA,
+}
+
+class HashType(Enum):
+    UNKNOWN = "UNKNOWN"
+    SHA256 = "SHA256"
+    SHA384 = "SHA384"
+
+PCR_EVENT_MARKER = "PCR_Event2"
+ALGORITHM_MARKER = "AlgorithmId"
+DIGEST_MARKER = "Digest "
+EVENT_SIZE_MARKER = "EventSize"
+EVENT_TYPE_MARKER = "Event "
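+
+# Illustrative sketch (an assumption, not the authoritative TF-A dump format)
+# of the entry shape these markers are matched against:
+#
+#   PCR_Event2:
+#       ...
+#       #0 AlgorithmId : SHA256
+#          Digest      : aa bb cc ...
+#                      : ...
+#       EventSize      : 5
+#       Event          : BL_2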
+
+BUF_SIZE = 65536
+
+# On FVPs, the critical data measurement is a hash of the non-volatile counter
+# registers, which the FVPs do not alter, so they keep the default values
+# below. They are packed together into a struct which is then hashed, and that
+# is replicated here.
+TFW_NVCTR_VAL = 31
+NTFW_NVCTR_VAL = 223
+COMBINED_NVCTRS = (NTFW_NVCTR_VAL << 32) | TFW_NVCTR_VAL
+COMBINED_NVCTR_BYTES = COMBINED_NVCTRS.to_bytes(8, byteorder="little")
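+# For example, with the defaults above:
+#   (223 << 32) | 31 == 0x000000DF0000001F
+# which is serialised little-endian as 1f 00 00 00 df 00 00 00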
+
+# Need to know the location of the built files to verify their hashes
+artefacts_dir = os.environ["artefacts_dir"]
+out_file_path = f"{artefacts_dir}/tfa_event_log"
+
+# This is needed to correctly identify the files associated with BL32 and BL33
+# as the names of these can vary by test
+build_args_path = f"{artefacts_dir}/fip_build_args"
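+# The build args file is assumed to hold whitespace-separated NAME=path pairs,
+# e.g. (paths purely illustrative):
+#   BL33=/artefacts/uefi.bin BL32=/artefacts/tee-header_v2.bin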
+
+# Tracks, for each image:
+# - the path to the built file
+# - whether an entry for this image has been found in the event log
+# - the hash algorithm recorded in the event log
+# - the hash recorded in the event log
+@dataclass
+class ImageData:
+    path: str = ""
+    found: bool = False
+    hash_type: HashType = HashType.UNKNOWN
+    event_log_hash: str = ""
+
+    # For convenience when unpacking
+    def as_tuple(self):
+        return (self.found, self.path, self.hash_type, self.event_log_hash)
+
+# As event log entries for these images are found, their data will be stored
+# inside of the objects in this dictionary
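+# Entries created with an empty path either have their path filled in later by
+# get_build_arg_paths() (BL33 and the BL32 payloads) or are compared against
+# default values instead of a file (startup locality and critical data).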
+image_data = {
+    ImageType.BL2 : ImageData(f"{artefacts_dir}/bl2.bin"),
+    ImageType.BL31 : ImageData(f"{artefacts_dir}/bl31.bin"),
+    ImageType.FW_CONFIG : ImageData(f"{artefacts_dir}/fvp_fw_config.dtb"),
+    ImageType.TB_FW_CONFIG : ImageData(f"{artefacts_dir}/fvp_tb_fw_config.dtb"),
+    ImageType.NT_FW_CONFIG : ImageData(f"{artefacts_dir}/fvp_nt_fw_config.dtb"),
+    ImageType.SOC_FW_CONFIG : ImageData(f"{artefacts_dir}/fvp_soc_fw_config.dtb"),
+    ImageType.BL33 : ImageData(""),
+    ImageType.BL32 : ImageData(""),
+    ImageType.BL32_EXTRA1 : ImageData(""),
+    ImageType.BL32_EXTRA2 : ImageData(""),
+    ImageType.STARTUP_LOCALITY : ImageData(""),
+    ImageType.CRITICAL_DATA : ImageData(""),
+}
+
+# Sometimes alternate paths are provided for some of the images used in the
+# FIP, so these need to be checked for and stored in the image_data dictionary
+def get_build_arg_paths():
+    build_data = ""
+    with open(build_args_path, 'r') as f:
+        build_data = f.read()
+
+    components = build_data.split()
+    for comp in components:
+        split_point = comp.find('=')
+        name = comp[0:split_point]
+        path_value = comp[(split_point + 1):]
+        image_type = ImageType.UNKNOWN
+        if name == "BL33":
+            image_type = ImageType.BL33
+        elif "BL32" in name:
+            if "EXTRA1" in name:
+                image_type = ImageType.BL32_EXTRA1
+            elif "EXTRA2" in name:
+                image_type = ImageType.BL32_EXTRA2
+            else:
+                image_type = ImageType.BL32
+
+        if image_type != ImageType.UNKNOWN:
+            image_data[image_type].path = path_value
+
+    # BL32 can show up as its own binary if it is not given a different name in
+    # the build arguments
+    if image_data[ImageType.BL32].path == "" and os.path.exists(f"{artefacts_dir}/bl32.bin"):
+        image_data[ImageType.BL32].path = f"{artefacts_dir}/bl32.bin"
+
+
+# Only found images should have their hashes compared
+found_images = []
+
+# Get the hash of the file stored at the given path with the specified hash
+# algorithm
+def calc_file_hash(path: str, hash_type: HashType) -> str:
+
+    if hash_type == HashType.UNKNOWN:
+        return ""
+
+    if path == "":
+        return "No path provided"
+
+    if not os.path.exists(path):
+        return f"No file available at path: {path}"
+
+    # Read and hash the file in chunks because the Docker image used for CI
+    # runs Python 3.10, so hashlib.file_digest() (added in 3.11) is not
+    # available
+    hasher = hashlib.new(hash_type.value.lower())
+    with open(path, "rb") as bin_file:
+        while True:
+            file_data = bin_file.read(BUF_SIZE)
+            if not file_data:  # EOF
+                break
+            hasher.update(file_data)
+
+    return hasher.hexdigest()
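+
+# For example (illustrative only), calc_file_hash(image_data[ImageType.BL2].path,
+# HashType.SHA256) returns the lowercase hex SHA-256 digest of bl2.bin.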
+
+# For an event log entry, extract the hash algorithm used, the hash itself and
+# the image the entry refers to
+def extract_hash(line: str, tfa_event_log_file) -> tuple[str, HashType, ImageType]:
+
+    # This skips over the PCR index and event type; later, these lines should
+    # also be parsed and used to calculate the PCR value
+    while ALGORITHM_MARKER not in line:
+        line = tfa_event_log_file.readline()
+        if not line:  # Truncated log, give up on this entry
+            return ("", HashType.UNKNOWN, ImageType.UNKNOWN)
+
+    hash_type = HashType.UNKNOWN
+    for ht in HashType:
+        if ht.value in line:
+            hash_type = ht
+            break
+
+    # Early return for now if this is not a hash type we handle
+    if hash_type == HashType.UNKNOWN:
+        return ("", hash_type, ImageType.UNKNOWN)
+
+    # Store the lines which contain the hash characters
+    digest_lines = []
+    line = tfa_event_log_file.readline()
+    if DIGEST_MARKER not in line:
+        return ("", hash_type, ImageType.UNKNOWN)
+
+    while EVENT_SIZE_MARKER not in line:
+        digest_lines.append(line)
+        line = tfa_event_log_file.readline()
+
+    # This line contains the event data, i.e. the name of the image
+    line = tfa_event_log_file.readline()
+    # Skip past the ": " separator to the first character of the name and drop
+    # the trailing newline
+    sep_ind = line.find(':') + 2
+    event_substr = line[sep_ind:-1]
+    image_type = ImageType.UNKNOWN
+
+    if event_substr in marker_to_image_type:
+        image_type = marker_to_image_type[event_substr]
+    elif ImageType.BL32.value in event_substr:
+        if ImageType.BL32_EXTRA1.value in event_substr:
+            image_type = ImageType.BL32_EXTRA1
+        elif ImageType.BL32_EXTRA2.value in event_substr:
+            image_type = ImageType.BL32_EXTRA2
+        else:
+            image_type = ImageType.BL32
+
+    if image_type == ImageType.UNKNOWN:
+        return ("", hash_type, ImageType.UNKNOWN)
+
+    # This is one of the images whose hash we want, so extract it from the
+    # digest lines collected above
+    digest = ""
+    for digest_line in digest_lines:
+        sep_ind = digest_line.find(" : ")
+        # + 3 to skip past the separator
+        component = digest_line[sep_ind + 3:].strip().replace(' ', '')
+        digest += component
+
+    return (digest, hash_type, image_type)
+
+
+# Update image data map with paths to BL33 and BL32 binaries
+get_build_arg_paths()
+
+with open(out_file_path, "r") as tfa_event_log_file:
+    line = tfa_event_log_file.readline()
+    while len(line) > 0:
+        # Found at the start of an event log entry
+        if PCR_EVENT_MARKER in line:
+            digest, hash_type, image_type = extract_hash(line, tfa_event_log_file)
+
+            if image_type != ImageType.UNKNOWN:
+                image_data[image_type].found = True
+                image_data[image_type].hash_type = hash_type
+                image_data[image_type].event_log_hash = digest
+                found_images.append(image_type)
+
+        line = tfa_event_log_file.readline()
+
+all_match = True
+for image_type in found_images:
+    present, file_path, hash_type, event_log_hash = image_data[image_type].as_tuple()
+    comparison_hash = ""
+    if image_type == ImageType.STARTUP_LOCALITY:
+        # The startup locality digest is expected to be all zeroes, so the
+        # comparison only succeeds in that case; parse the string as hex
+        if int(event_log_hash, 16) == 0:
+            comparison_hash = event_log_hash
+        else:
+            comparison_hash = "0"
+
+    elif image_type == ImageType.CRITICAL_DATA:
+        hasher = hashlib.new(hash_type.value.lower())
+        hasher.update(COMBINED_NVCTR_BYTES)
+        comparison_hash = hasher.hexdigest()
+    else:
+        comparison_hash = calc_file_hash(file_path, hash_type)
+
+    print(f"{image_type.name} hash algo: {hash_type.value}")
+    print(f"Event log hash: {event_log_hash}\nComparison hash: {comparison_hash}")
+    if comparison_hash != event_log_hash:
+        print("Mismatched hashes")
+        all_match = False
+
+# These two must always be present, so treat their absence as a failure
+if not image_data[ImageType.BL2].found:
+    print("BL2 hash not found")
+    all_match = False
+
+if not image_data[ImageType.BL31].found:
+    print("BL31 hash not found")
+    all_match = False
+
+if all_match:
+    print("All found hashes match")
+else:
+    print("Hash verification failed")
+    raise SystemExit(1)