feat(ci): Measured boot hash verification scripts
Remove the dependency on the external fTPM and OP-TEE binaries by using
our own scripts to validate the hashes in the TF-A event log during
measured boot tests.
Change-Id: Ifd1fa8ce7d2091510b4c8242e25438e4d9aa61bb
Signed-off-by: Slava Andrianov <slava.andrianov@arm.com>
diff --git a/expect/compare_hashes.inc b/expect/compare_hashes.inc
new file mode 100644
index 0000000..4e9c358
--- /dev/null
+++ b/expect/compare_hashes.inc
@@ -0,0 +1,21 @@
+#
+# Copyright (c) 2025 Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+# Expect script that runs the TF-A event log hash verification script and
+# checks its output
+#
+
+set mismatched_marker "Mismatched"
+set missing_marker "not found"
+
+set compare_hash_path $env(ci_root)
+append compare_hash_path "/script/verify_hashes.py"
+
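+# The verification script reads "artefacts_dir" from the environment prepared
+# by the run scripts; the same check can be reproduced by hand when debugging,
+# e.g. (illustrative): artefacts_dir=<path-to-artefacts> python3 <ci_root>/script/verify_hashes.py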
+set hash_verification_output [exec python3 $compare_hash_path 2>@1]
+puts $hash_verification_output
+if {[regexp "$mismatched_marker|$missing_marker" $hash_verification_output]} {
+ message "Hash mismatch or missing hash detected"
+ exit_uart -1
+}
diff --git a/expect/linux-tpm-384.exp b/expect/linux-tpm-384.exp
index 9dd4731..7ab9bf2 100644
--- a/expect/linux-tpm-384.exp
+++ b/expect/linux-tpm-384.exp
@@ -1,5 +1,5 @@
#
-# Copyright (c) 2021-2022, Arm Limited. All rights reserved.
+# Copyright (c) 2021-2025, Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
@@ -11,14 +11,12 @@
source [file join [file dirname [info script]] utils.inc]
source [file join [file dirname [info script]] handle-arguments.inc]
-# File to store the event log from the ftpm service.
-set TFA_DIGEST [get_param tfa_digest "tfa_event_log"]
-set FTPM_DIGEST [get_param ftpm_digest "ftpm_event_log"]
+set TFA_EVENT_LOG [get_param tfa_log "tfa_event_log"]
# regexp for non-zero PCR0
set non_zero_pcr "(?!(\\s00){16})((\\s(\[0-9a-f\]){2}){16}\\s)"
-capture_log $TFA_DIGEST "Booting BL31|Finished using crypto library"
+capture_log $TFA_EVENT_LOG "Booting BL31|Finished using crypto library"
expect {
"login" {
@@ -52,4 +50,6 @@
expect_string "#" "finished reading PCRs"
-compare_log $TFA_DIGEST $FTPM_DIGEST
+if {[info exists ::env(verify_hashes)]} {
+ source [file join [file dirname [info script]] compare_hashes.inc]
+}
diff --git a/expect/linux-tpm.exp b/expect/linux-tpm.exp
index 27ff89e..2bb6654 100644
--- a/expect/linux-tpm.exp
+++ b/expect/linux-tpm.exp
@@ -1,5 +1,5 @@
#
-# Copyright (c) 2020-2022, Arm Limited. All rights reserved.
+# Copyright (c) 2020-2025, Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
@@ -9,15 +9,13 @@
source [file join [file dirname [info script]] utils.inc]
source [file join [file dirname [info script]] handle-arguments.inc]
-# File to store the event log from the ftpm service.
-set TFA_DIGEST [get_param tfa_digest "tfa_event_log"]
-set FTPM_DIGEST [get_param ftpm_digest "ftpm_event_log"]
+set TFA_EVENT_LOG [get_param tfa_log "tfa_event_log"]
# regexp for non-zero PCR0
set non_zero_pcr "(?!(\\s00){16})((\\s(\[0-9a-f\]){2}){16}\\s)"
set zero_pcr "(\\s00){16}\\s+(00\\s){16}"
-capture_log $TFA_DIGEST "Booting BL31|Finished using crypto library"
+capture_log $TFA_EVENT_LOG "Booting BL31|Finished using crypto library"
expect {
"login" {
@@ -70,4 +68,6 @@
expect_string "#" "finished reading PCRs"
-compare_log $TFA_DIGEST $FTPM_DIGEST
+if {[info exists ::env(verify_hashes)]} {
+ source [file join [file dirname [info script]] compare_hashes.inc]
+}
diff --git a/expect/tftf.inc b/expect/tftf.inc
index b47d1c3..095403b 100644
--- a/expect/tftf.inc
+++ b/expect/tftf.inc
@@ -1,5 +1,5 @@
#
-# Copyright (c) 2023 Arm Limited. All rights reserved.
+# Copyright (c) 2023-2025 Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
@@ -10,21 +10,32 @@
expect_re "Running at NS-EL(1|2)"
+# Compare the TF-A event log from BL2 against the event logs of BL32 and
+# BL33; this comparison is currently not available for all measured boot tests
if {[info exists ::env(measured_boot)]} {
- capture_and_compare_log tftf_event_log "TEST COMPLETE" tfa_event_log
+ capture_and_compare_log tftf_event_log "TEST COMPLETE" $TFA_EVENT_LOG
}
+set uart_return_value ""
expect {
"Tests Failed : 0" {
expect_string "Exiting tests." "all TFTF tests passed"
- exit_uart 0
+ set uart_return_value 0
}
"Tests Passed : 0" {
expect_string "Exiting tests." "no TFTF tests passed"
- exit_uart -1
+ set uart_return_value -1
}
-re "Tests Failed : \[^0]" {
expect_string "Exiting tests." "one or more TFTF tests failed"
- exit_uart -1
+ set uart_return_value -1
}
}
+
+# Verifies the hashes in the TF-A event log for measured boot tests
+if {[info exists ::env(verify_hashes)]} {
+ message "Starting measured boot hash verification"
+ source [file join [file dirname [info script]] compare_hashes.inc]
+}
+
+exit_uart $uart_return_value
diff --git a/expect/tpm-logs.exp b/expect/tpm-logs.exp
index e46beb6..8b4bb62 100644
--- a/expect/tpm-logs.exp
+++ b/expect/tpm-logs.exp
@@ -1,5 +1,5 @@
#
-# Copyright (c) 2020, Arm Limited. All rights reserved.
+# Copyright (c) 2020-2025, Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
@@ -9,9 +9,4 @@
source [file join [file dirname [info script]] utils.inc]
source [file join [file dirname [info script]] handle-arguments.inc]
-# File to store the event log from the ftpm service.
-set FTPM_DIGEST [get_param ftpm_digest "ftpm_event_log"]
-
-capture_log $FTPM_DIGEST "returned value"
-
-source [file join [file dirname [info script]] uart-hold.inc]
\ No newline at end of file
+source [file join [file dirname [info script]] uart-hold.inc]
diff --git a/expect/trusted-firmware.inc b/expect/trusted-firmware.inc
index f2c4e99..f6c6652 100644
--- a/expect/trusted-firmware.inc
+++ b/expect/trusted-firmware.inc
@@ -1,5 +1,5 @@
#
-# Copyright (c) 2019-2020, Arm Limited. All rights reserved.
+# Copyright (c) 2019-2025, Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
@@ -8,6 +8,8 @@
# This script is not standalone and should be sourced by a top expect script.
#
+set TFA_EVENT_LOG [get_param tfa_log "tfa_event_log"]
+
# Initial boot message won't be present if we're starting at BL31. Skip waiting
# for them by inspecting the environment variable 'skip_early_boot_msgs'.
if {![info exists ::env(skip_early_boot_msgs)]} {
@@ -28,11 +30,14 @@
# Capture the event log from TF-A if we are running with measured boot
# enabled.
- if {[info exists ::env(measured_boot)]} {
- capture_log tfa_event_log "BL1: Booting BL31|Finished using crypto library"
+ if {[info exists ::env(measured_boot)] || [info exists ::env(verify_hashes)]} {
+ capture_log $TFA_EVENT_LOG "BL1: Booting BL31|Finished using crypto library"
} else {
expect_string "BL1: Booting BL31"
}
} else {
message "Skipping early boot messages from BL1 and BL2"
+ if {[info exists ::env(verify_hashes)]} {
+ capture_log $TFA_EVENT_LOG "BL1: Booting BL31|BL2: Booting BL31|Finished using crypto library"
+ }
}
diff --git a/expect/tsp.exp b/expect/tsp.exp
index 24b99fd..ba8d4c6 100644
--- a/expect/tsp.exp
+++ b/expect/tsp.exp
@@ -43,11 +43,12 @@
}
}
+set TFA_EVENT_LOG [get_param tfa_log "tfa_event_log"]
# In case of measured boot, capture the event log that's been passed to the TSP,
# this is used to ensure that the same event log is produced in BL2. Match the
# previously stored digest with the one generated by the TSP service.
if {[info exists ::env(measured_boot)]} {
- capture_and_compare_log tsp_event_log "TSP: cpu" tfa_event_log
+ capture_and_compare_log tsp_event_log "TSP: cpu" $TFA_EVENT_LOG
}
-source [file join [file dirname [info script]] uart-hold.inc]
\ No newline at end of file
+source [file join [file dirname [info script]] uart-hold.inc]
diff --git a/script/build_package.sh b/script/build_package.sh
index 535b7e3..b3b1b1e 100755
--- a/script/build_package.sh
+++ b/script/build_package.sh
@@ -314,6 +314,13 @@
set +a
fi
+ if [ "$(get_tf_opt MEASURED_BOOT)" = 1 ]; then
+ # These are needed for accurate hash verification
+ local build_args_path="${workspace}/fip_build_args"
+ echo $@ > $build_args_path
+ archive_file $build_args_path
+ fi
+
make -C "$tf_root" $make_j_opts $(cat "$tf_config_file") DEBUG="$DEBUG" BUILD_BASE=$tf_build_root V=1 "$@" \
${fip_targets:-fip} &>>"$build_log" || fail_build
)
@@ -530,6 +537,16 @@
not_upon "${QCBOR_DIR}"; then
emit_env "QCBOR_DIR" "$WORKSPACE/qcbor"
fi
+
+ # Hash verification is only possible when the event log contains enough
+ # information, which is the case when EVENT_LOG_LEVEL is at least 20 or
+ # when this is a debug build
+ if [[ ("$(get_tf_opt MEASURED_BOOT)" -eq 1) &&
+ (("$bin_mode" == "debug") || ("$(get_tf_opt EVENT_LOG_LEVEL)" -ge 20)) ]]; then
+ # This variable is later exported to the expect scripts so that
+ # the hashes in the TF-A event log can be verified
+ set_run_env "verify_hashes" "1"
+ fi
if [ -f "$env_file" ]; then
set -a
source "$env_file"
diff --git a/script/expect-post-runner.sh b/script/expect-post-runner.sh
index 92fff75..e429b27 100755
--- a/script/expect-post-runner.sh
+++ b/script/expect-post-runner.sh
@@ -18,6 +18,14 @@
archive="${WORKSPACE}/artefacts-lava"
+if [ "$(get_run_env "$archive" "verify_hashes" 0)" -eq 1 ]; then
+ echo "Enabling hash verification for measured boot"
+ export verify_hashes=1
+ export ci_root
+ export artefacts_dir="$archive"
+ export tfa_log="${archive}/tfa_event_log"
+fi
+
# Extract UART numbering from the FVP common log using the ports script
declare -a ports=()
diff --git a/script/run_package.sh b/script/run_package.sh
index 340b3a5..60ebcb7 100755
--- a/script/run_package.sh
+++ b/script/run_package.sh
@@ -144,6 +144,11 @@
source "run/env"
fi
+if [ -v verify_hashes ]; then
+ export verify_hashes=1
+ export artefacts_dir="$run_cwd"
+fi
+
# Source model environment for run
if [ -f "run/model_env" ]; then
source "run/model_env"
diff --git a/script/verify_hashes.py b/script/verify_hashes.py
new file mode 100755
index 0000000..71f11b3
--- /dev/null
+++ b/script/verify_hashes.py
@@ -0,0 +1,282 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2025 Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# This script compares the hashes in the TF-A event log against the files
+# generated by the build system or, in the case of the startup locality and
+# critical data, against the expected default values.
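+#
+# The parser below only relies on a few markers ("PCR_Event2", "AlgorithmId",
+# "Digest", "EventSize", "Event"); an entry in the dumped log is assumed to
+# look roughly like the following (illustrative layout and values, not an
+# exact copy of the TF-A output):
+#
+#   PCR_Event2:
+#     PCRIndex         : 0
+#     EventType        : 3
+#     Digests Count    : 1
+#       #0 AlgorithmId : SHA256
+#          Digest      : 0a 1b ... (hash bytes, possibly over several lines)
+#                       : ...
+#     EventSize        : 5
+#     Event            : BL_2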
+
+import os
+import hashlib
+from enum import Enum
+from dataclasses import dataclass
+
+# Identifiers used in the event log for the different images
+class ImageType(Enum):
+ UNKNOWN = "UNKNOWN"
+ BL2 = "BL_2"
+ BL31 = "SECURE_RT_EL3"
+ NT_FW_CONFIG = "NT_FW_CONFIG"
+ TB_FW_CONFIG = "TB_FW_CONFIG"
+ SOC_FW_CONFIG = "SOC_FW_CONFIG"
+ FW_CONFIG = "FW_CONFIG"
+ BL33 = "BL_33"
+ BL32 = "SECURE_RT_EL1"
+ BL32_EXTRA1 = "EXTRA1"
+ BL32_EXTRA2 = "EXTRA2"
+ STARTUP_LOCALITY = "StartupLocality"
+ CRITICAL_DATA = "CRITICAL DATA"
+
+marker_to_image_type = {
+ "BL_2" : ImageType.BL2,
+ "SECURE_RT_EL3" : ImageType.BL31,
+ "NT_FW_CONFIG" : ImageType.NT_FW_CONFIG,
+ "TB_FW_CONFIG" : ImageType.TB_FW_CONFIG,
+ "SOC_FW_CONFIG" : ImageType.SOC_FW_CONFIG,
+ "FW_CONFIG" : ImageType.FW_CONFIG,
+ "BL_33" : ImageType.BL33,
+ "StartupLocality" : ImageType.STARTUP_LOCALITY,
+ "CRITICAL DATA" : ImageType.CRITICAL_DATA,
+}
+
+class HashType(Enum):
+ UNKNOWN = "UNKNOWN"
+ SHA256 = "SHA256"
+ SHA384 = "SHA384"
+
+PCR_EVENT_MARKER = "PCR_Event2"
+ALGORITHM_MARKER = "AlgorithmId"
+DIGEST_MARKER = "Digest "
+EVENT_SIZE_MARKER = "EventSize"
+EVENT_TYPE_MARKER = "Event "
+
+BUF_SIZE = 65536
+
+# On FVPs, the critical data measurement is a hash of the non-volatile
+# counters, which the FVPs do not alter. These counters have the default
+# values below and are hashed together as a single struct, so that layout is
+# replicated here
+TFW_NVCTR_VAL = 31
+NTFW_NVCTR_VAL = 223
+COMBINED_NVCTRS = (NTFW_NVCTR_VAL << 32) | TFW_NVCTR_VAL
+COMBINED_NVCTR_BYTES = COMBINED_NVCTRS.to_bytes(8, byteorder="little")
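+# For the default values above this yields the 8 bytes
+# 1f 00 00 00 df 00 00 00 (trusted counter in the low 32 bits, little-endian),
+# which are hashed further below when checking the CRITICAL DATA entry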
+
+# Need to know the location of the built files to verify their hashes
+artefacts_dir = os.environ["artefacts_dir"]
+out_file_path = f"{artefacts_dir}/tfa_event_log"
+
+# This is needed to correctly identify the files associated with BL32 and
+# BL33, as their names can vary by test
+build_args_path = f"{artefacts_dir}/fip_build_args"
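+# fip_build_args holds the extra make arguments that were passed to the FIP
+# build, e.g. (illustrative): "BL33=<path-to-bl33> BL32=<path-to-bl32>
+# BL32_EXTRA1=<path> ..."; the BL32/BL33 paths are taken from there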
+
+# Structure:
+# - whether an entry for this image has been found in the event log
+# - path to the built file
+# - hash type collected from the event log
+# - hash of the file from the event log
+@dataclass
+class ImageData:
+ found: bool
+ path: str
+ hash_type: HashType
+ event_log_hash: str
+
+ def __init__(self, initial_path: str):
+ self.found = False
+ self.path = initial_path
+ self.hash_type = HashType.UNKNOWN
+ self.event_log_hash = ""
+
+ # For convenience
+ def as_tuple(self):
+ return (self.found, self.path, self.hash_type, self.event_log_hash)
+
+# As event log entries for these images are found, their data will be stored
+# inside of the objects in this dictionary
+image_data = {
+ ImageType.BL2 : ImageData(f"{artefacts_dir}/bl2.bin"),
+ ImageType.BL31 : ImageData(f"{artefacts_dir}/bl31.bin"),
+ ImageType.FW_CONFIG : ImageData(f"{artefacts_dir}/fvp_fw_config.dtb"),
+ ImageType.TB_FW_CONFIG : ImageData(f"{artefacts_dir}/fvp_tb_fw_config.dtb"),
+ ImageType.NT_FW_CONFIG : ImageData(f"{artefacts_dir}/fvp_nt_fw_config.dtb"),
+ ImageType.SOC_FW_CONFIG : ImageData(f"{artefacts_dir}/fvp_soc_fw_config.dtb"),
+ ImageType.BL33 : ImageData(""),
+ ImageType.BL32 : ImageData(""),
+ ImageType.BL32_EXTRA1 : ImageData(""),
+ ImageType.BL32_EXTRA2 : ImageData(""),
+ ImageType.STARTUP_LOCALITY : ImageData(""),
+ ImageType.CRITICAL_DATA : ImageData(""),
+}
+
+# Sometimes alternate paths are provided for some of the images used in the
+# FIP, so these need to be checked for and stored in the image_data dictionary
+def get_build_arg_paths():
+ build_data = ""
+ with open(build_args_path, 'r') as f:
+ build_data = f.read()
+
+ components = build_data.split()
+ for comp in components:
+ split_point = comp.find('=')
+ name = comp[0:split_point]
+ path_value = comp[(split_point + 1):]
+ image_type = ImageType.UNKNOWN
+ if name == "BL33":
+ image_type = ImageType.BL33
+ elif "BL32" in name:
+ if "EXTRA1" in name:
+ image_type = ImageType.BL32_EXTRA1
+ elif "EXTRA2" in name:
+ image_type = ImageType.BL32_EXTRA2
+ else:
+ image_type = ImageType.BL32
+
+ if image_type != ImageType.UNKNOWN:
+ image_data[image_type].path = path_value
+
+ # BL32 may be present as a plain bl32.bin if it was not given an explicit
+ # path in the build arguments
+ if (image_data[ImageType.BL32].path == "") and os.path.exists(f"{artefacts_dir}/bl32.bin"):
+ image_data[ImageType.BL32].path = f"{artefacts_dir}/bl32.bin"
+
+
+# Only found images should have their hashes compared
+found_images = []
+
+# Get the hash of the file stored at the given path with the specified hash
+# algorithm
+def calc_file_hash(path: str, hash_type: HashType) -> str:
+
+ if hash_type == HashType.UNKNOWN:
+ return ""
+
+ if path == "":
+ return "No path provided"
+
+ if not os.path.exists(path):
+ return f"No file available at path: {path}"
+
+ # The Docker image used for CI runs Python 3.10, so hashlib.file_digest()
+ # (added in Python 3.11) is not available; hash the file in chunks instead
+ hasher = hashlib.new(hash_type.value.lower())
+ with open(path, "rb") as bin_file:
+ while True:
+ file_data = bin_file.read(BUF_SIZE)
+ if not file_data: # EOF
+ break
+ hasher.update(file_data)
+
+ return hasher.hexdigest()
+
+# For an event log entry, extract the hash algorithm used and the hash for
+# this entry
+def extract_hash(line: str, tfa_event_log_file) -> tuple[str, HashType, ImageType]:
+
+ # This skips over the PCR index and event type; later, these lines should
+ # be parsed and used to calculate the PCR value
+ while ALGORITHM_MARKER not in line:
+ line = tfa_event_log_file.readline()
+ if not line: # Reached EOF without finding the algorithm entry
+ return ("", HashType.UNKNOWN, ImageType.UNKNOWN)
+
+ hash_type = HashType.UNKNOWN
+ for ht in HashType:
+ if ht.value in line:
+ hash_type = ht
+ break
+
+ # Early return for now if other hash type
+ if hash_type == HashType.UNKNOWN:
+ return ("", hash_type, ImageType.UNKNOWN)
+
+ # Storing lines which contain the hash characters
+ digest_lines = []
+ line = tfa_event_log_file.readline()
+ if DIGEST_MARKER not in line:
+ return ("", hash_type, ImageType.UNKNOWN)
+
+ while EVENT_SIZE_MARKER not in line:
+ digest_lines.append(line)
+ line = tfa_event_log_file.readline()
+
+ # The next line contains the event data, which holds the image name
+ line = tfa_event_log_file.readline()
+ # Skip past the separator to the first character of the image name
+ sep_ind = line.find(':') + 2
+ event_substr = line[sep_ind:-1]
+ image_type = ImageType.UNKNOWN
+
+ if event_substr in marker_to_image_type:
+ image_type = marker_to_image_type[event_substr]
+ elif ImageType.BL32.value in event_substr:
+ if ImageType.BL32_EXTRA1.value in event_substr:
+ image_type = ImageType.BL32_EXTRA1
+ elif ImageType.BL32_EXTRA2.value in event_substr:
+ image_type = ImageType.BL32_EXTRA2
+ else:
+ image_type = ImageType.BL32
+
+ if image_type == ImageType.UNKNOWN:
+ return ("", hash_type, ImageType.UNKNOWN)
+
+ # We know this is one of the images whose hash we want, so proceed with
+ # extracting the hash
+ hash = ""
+ for digest_line in digest_lines:
+ sep_ind = digest_line.find(" : ")
+ # + 3 to skip past the separator
+ component = digest_line[sep_ind + 3:].strip().replace(' ', '')
+ hash += component
+
+ return (hash, hash_type, image_type)
+
+
+# Update image data map with paths to BL33 and BL32 binaries
+get_build_arg_paths()
+
+with open(out_file_path, "r") as tfa_event_log_file:
+ line = tfa_event_log_file.readline()
+ while len(line) > 0:
+ # Found at the start of an event log entry
+ if PCR_EVENT_MARKER in line:
+ hash, hash_type, image_type = extract_hash(line, tfa_event_log_file)
+
+ if image_type != ImageType.UNKNOWN:
+ image_data[image_type].found = True
+ image_data[image_type].hash_type = hash_type
+ image_data[image_type].event_log_hash = hash
+ found_images.append(image_type)
+
+ line = tfa_event_log_file.readline()
+
+all_match = True
+for image_type in found_images:
+ present, file_path, hash_type, event_log_hash = image_data[image_type].as_tuple()
+ comparison_hash = ""
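+ # The startup locality entry is expected to record locality 0, so any other
+ # value is treated as a mismatch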
+ if image_type == ImageType.STARTUP_LOCALITY:
+ if int(event_log_hash) == 0:
+ comparison_hash = event_log_hash
+ else:
+ comparison_hash = "0"
+
+ elif image_type == ImageType.CRITICAL_DATA:
+ hasher = hashlib.new(hash_type.value.lower())
+ hasher.update(COMBINED_NVCTR_BYTES)
+ comparison_hash = hasher.hexdigest()
+ else:
+ comparison_hash = calc_file_hash(file_path, hash_type)
+
+ print(f"{image_type.name} hash algo: {hash_type.value}")
+ print(f"Event log hash: {event_log_hash}\nComparison hash: {comparison_hash}")
+ if comparison_hash != event_log_hash:
+ print("Mismatched hashes")
+ all_match = False
+
+# These two must always be present
+if not image_data[ImageType.BL2].found:
+ print("BL2 hash not found")
+
+if not image_data[ImageType.BL31].found:
+ print("BL31 hash not found")
+
+if all_match:
+ print("All found hashes match")