Fix sp_layout.json generation

TF-A build scripts use a json configuration file (sp_layout.json) to
know which additional files need to be added to the FIP package. Each SP
will generate and install a fragment of this json file and depending on
the set of SPs to be deployed, these fragments have to be merged into a
single file.
TS has no assembly layer and thus there is no knowledge about the needed
set of SPs. As a workaround TS assumes all SPs in the install tree need
to be deployed and the SP build process will merge all available json
fragments. The main benefit is decreased complexity in integration
systems.

Originally this merge was executed at cmake configuration time. This is
wrong, as at installation time the install directory might be different. As
a result the merged json file ends up in the binary directory of the SP
and thus remains hidden.
This change fixes the issue by moving the merging to the install step.

Signed-off-by: Gyorgy Szing <Gyorgy.Szing@arm.com>
Change-Id: Ie273c396fe81b55d94ce7f10b8cc4e9c35c0bd82
diff --git a/tools/cmake/common/ExportSp.cmake b/tools/cmake/common/ExportSp.cmake
index faa69fe..1e550ad 100644
--- a/tools/cmake/common/ExportSp.cmake
+++ b/tools/cmake/common/ExportSp.cmake
@@ -1,5 +1,5 @@
 #-------------------------------------------------------------------------------
-# Copyright (c) 2020-2022, Arm Limited and Contributors. All rights reserved.
+# Copyright (c) 2020-2023, Arm Limited and Contributors. All rights reserved.
 #
 # SPDX-License-Identifier: BSD-3-Clause
 #
@@ -92,11 +92,18 @@
 	if (DEFINED EXPORT_JSON_IN)
 		configure_file(${EXPORT_JSON_IN} ${CMAKE_CURRENT_BINARY_DIR}/${EXPORT_SP_NAME}.json @ONLY NEWLINE_STYLE UNIX)
 		install(FILES ${CMAKE_CURRENT_BINARY_DIR}/${EXPORT_SP_NAME}.json DESTINATION ${TS_ENV}/json)
-
 		find_package(Python3 REQUIRED COMPONENTS Interpreter)
-		execute_process(COMMAND ${Python3_EXECUTABLE} ${TS_ROOT}/tools/python/merge_json.py
-				${CMAKE_INSTALL_PREFIX}/${TS_ENV}/json/sp_layout.json
-				${CMAKE_CURRENT_BINARY_DIR}/${EXPORT_SP_NAME}.json
+		# Create a cmake code fragment to merge the SP json files describing the SPs to be loaded by
+		# BL2. This is only needed when FIP packaging is used.
+		# The code fragment ensures merging is done at installation time.
+		string(JOIN "\n" install_script
+			"execute_process(COMMAND ${Python3_EXECUTABLE}"
+			"	${TS_ROOT}/tools/python/merge_json.py"
+			"		-vvv -e \${CMAKE_INSTALL_PREFIX}/@TS_ENV@/json/sp_layout.json"
+			"		-o \${CMAKE_INSTALL_PREFIX}/@TS_ENV@/json/sp_layout.json"
+			"		\${CMAKE_INSTALL_PREFIX}/@TS_ENV@/json/*.json)"
 		)
+		string(CONFIGURE "${install_script}" install_script @ONLY)
+		install(CODE "${install_script}")
 	endif()
 endfunction()
diff --git a/tools/python/merge_json.py b/tools/python/merge_json.py
index 68ad10b..79898c8 100644
--- a/tools/python/merge_json.py
+++ b/tools/python/merge_json.py
@@ -1,27 +1,125 @@
 #!/usr/bin/env python3
 # SPDX-License-Identifier: BSD-3-Clause
 #
-# Copyright (c) 2022, Arm Limited. All rights reserved.
+# Copyright (c) 2022-2023, Arm Limited. All rights reserved.
 
-"""
-Merge two json files, the second is merged into the first. If the first file
-doesn't exists yet, it will be created, along with its parent directories.
+"""Merge json files and print the result to STDOUT.
+
+Source files are specified with a list of file names. Any name in the list can
+be a glob pattern. Glob patterns act as if the returned list of file names were
+passed instead. The list returned by glob is not sorted. All files are
+processed in the order being found in the argument list.
+
+Note: do not forget to quote globbing patterns when running the tool from a
+      shell.
 """
 
+import argparse
+import errno
+import glob
 import json
+import logging
 import os.path
 import sys
 
-if os.path.isfile(sys.argv[1]):
-    with open(sys.argv[1], "rt", encoding="ascii") as f:
-        combined = json.load(f)
-else:
-    os.makedirs(os.path.dirname(sys.argv[1]), exist_ok=True)
-    combined = {}
+# initialize logger
+logging.getLogger('merge_json')
+logging.basicConfig(level=logging.ERROR)
 
-with open(sys.argv[2], "rt", encoding="ascii") as f:
-    current = json.load(f)
-    combined = {**combined, **current}
 
-with open(sys.argv[1], "wt", encoding="ascii") as f:
-    json.dump(combined, f, indent=4)
+def parse_arguments(args):
+    parser = argparse.ArgumentParser(
+                prog=os.path.basename(args[0]),
+                description=__doc__,
+                formatter_class=argparse.RawDescriptionHelpFormatter)
+    parser.add_argument(
+                "-e", "--exclude",
+                default=None,
+                metavar="<exclude pattern>",
+                help="Exclude files matching this pattern.")
+    parser.add_argument(
+                "-o", "--output_file",
+                default=None,
+                metavar="<path to output file>",
+                help="Write result to this file instead of STDOUT")
+    parser.add_argument(
+                "-v",
+                action='append_const', const='v',
+                metavar="<verbosity level>",
+                help="Set the amount of information printed to STDERR." +
+                     " Passing more times gives more info.")
+    parser.add_argument(
+                "-i", "--ignore-missing",
+                dest="ignore_missing",
+                action='store_const', const=True, default=False,
+                help="Ignore missing source files or source globs returning" +
+                     " empty result.")
+    parser.add_argument(
+                'source_list',
+                nargs="+",
+                metavar="<source file>",
+                help="List of source files (file name can be glob pattern).")
+    parsed_args = parser.parse_args(args[1:])
+    # Count -v arguments to logging level
+    if parsed_args.v:
+        llv = len(parsed_args.v)
+        if llv > 3:
+            llv = 3
+    else:
+        llv = 0
+    parsed_args.log_level = [logging.ERROR, logging.WARNING, logging.INFO,
+                             logging.DEBUG][llv]
+    return (parsed_args)
+
+
+def merge_files(parsed_args):
+    logger = logging.getLogger('merge_json')
+
+    logger.info(
+        "Merging " + str(parsed_args.source_list) + " to " +
+        (parsed_args.output_file if parsed_args.output_file else "STDOUT"))
+
+    result = {}
+    exclude_list = None
+
+    if parsed_args.exclude:
+        exclude_list = glob.glob(parsed_args.exclude, recursive=True)
+        if exclude_list:
+            logger.debug("Excluding files: %s" % exclude_list)
+        else:
+            logger.warning("Exclude pattern matches no files.")
+
+    for pattern in parsed_args.source_list:
+        file_list = glob.glob(pattern, recursive=True)
+        logger.debug("Globbing " + pattern + " = " + str(file_list))
+        if not file_list:
+            logger.error("Pattern \"%s\" does not match any file" % pattern)
+            if not parsed_args.ignore_missing:
+                raise (FileNotFoundError(
+                        errno.ENOENT,
+                        "Pattern does not match any file",
+                        pattern))
+
+        for file in list(file_list):
+            if exclude_list and file in list(exclude_list):
+                logger.debug("excluding file " + file)
+                continue
+            logger.debug("Reading source file " + file)
+            with open(file, "rt", encoding="utf8", errors="strict") as f:
+                result.update(json.load(f))
+
+    if parsed_args.output_file is not None:
+        path = os.path.dirname(parsed_args.output_file)
+        if path:
+            os.makedirs(path, exist_ok=True)
+        with open(parsed_args.output_file, "w", encoding="utf8") as f:
+            json.dump(result, f, indent=4)
+    else:
+        print(json.dumps(result, indent=4))
+
+
+if __name__ == "__main__":
+    parsed_args = parse_arguments(sys.argv)
+    logger = logging.getLogger('merge_json')
+    logger.setLevel(parsed_args.log_level)
+    merge_files(parsed_args)